From 647f513391cfc2fe84c599fd7011ca896a319a00 Mon Sep 17 00:00:00 2001 From: Shubham Gupta Date: Tue, 5 Sep 2023 16:08:00 +0530 Subject: [PATCH 01/67] consider version of objects as part of fqn during serde --- packages/syft/src/syft/serde/recursive.py | 5 ++++- packages/syft/src/syft/util/util.py | 3 +++ 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/packages/syft/src/syft/serde/recursive.py b/packages/syft/src/syft/serde/recursive.py index fa01f91c58c..47d151890ad 100644 --- a/packages/syft/src/syft/serde/recursive.py +++ b/packages/syft/src/syft/serde/recursive.py @@ -94,7 +94,10 @@ def recursive_serde_register( alias_fqn = check_fqn_alias(cls) cls = type(cls) if not isinstance(cls, type) else cls - fqn = f"{cls.__module__}.{cls.__name__}" + if hasattr(cls, "__version__"): + fqn = f"{cls.__module__}.{cls.__name__}.{cls.__version__}" + else: + fqn = f"{cls.__module__}.{cls.__name__}" nonrecursive = bool(serialize and deserialize) _serialize = serialize if nonrecursive else rs_object2proto diff --git a/packages/syft/src/syft/util/util.py b/packages/syft/src/syft/util/util.py index 8f76f6de5be..1089ef7f102 100644 --- a/packages/syft/src/syft/util/util.py +++ b/packages/syft/src/syft/util/util.py @@ -147,6 +147,9 @@ def get_fully_qualified_name(obj: object) -> str: fqn += "." + obj.__class__.__name__ except Exception as e: error(f"Failed to get FQN: {e}") + + if hasattr(obj, "__version__"): + fqn += "." 
+ str(obj.__version__) return fqn From 30641eb4db970b409edca38dcc973a2cba51fcb5 Mon Sep 17 00:00:00 2001 From: Shubham Gupta Date: Tue, 5 Sep 2023 16:36:39 +0530 Subject: [PATCH 02/67] add a different version of node metadata --- .../syft/service/metadata/node_metadata.py | 30 +++++++++++++++++++ 1 file changed, 30 insertions(+) diff --git a/packages/syft/src/syft/service/metadata/node_metadata.py b/packages/syft/src/syft/service/metadata/node_metadata.py index d3b03e71c1a..a27fe8a704e 100644 --- a/packages/syft/src/syft/service/metadata/node_metadata.py +++ b/packages/syft/src/syft/service/metadata/node_metadata.py @@ -15,6 +15,7 @@ from ...node.credentials import SyftVerifyKey from ...serde.serializable import serializable from ...types.syft_object import SYFT_OBJECT_VERSION_1 +from ...types.syft_object import SYFT_OBJECT_VERSION_2 from ...types.syft_object import StorableObjectType from ...types.syft_object import SyftObject from ...types.transforms import convert_types @@ -88,6 +89,35 @@ def check_version(self, client_version: str) -> bool: ) +@serializable() +class NodeMetadataV2(SyftObject): + __canonical_name__ = "NodeMetadata" + __version__ = SYFT_OBJECT_VERSION_2 + + name: str + highest_version: int + lowest_version: int + id: UID + verify_key: SyftVerifyKey + syft_version: str + node_type: NodeType = NodeType.DOMAIN + deployed_on: str = "Date" + organization: str = "OpenMined" + on_board: bool = False + description: str = "Text" + signup_enabled: bool + admin_email: str + node_side_type: str + show_warnings: bool + + def check_version(self, client_version: str) -> bool: + return check_version( + client_version=client_version, + server_version=self.syft_version, + server_name=self.name, + ) + + @serializable() class NodeMetadataJSON(BaseModel, StorableObjectType): metadata_version: int From 91c3e198a412732b44594d73bbf0987c98d14c72 Mon Sep 17 00:00:00 2001 From: Shubham Gupta Date: Thu, 7 Sep 2023 14:12:22 +0530 Subject: [PATCH 03/67] update serde to 
consider canonical name and version as FQN Co-authored-by:Peter Chung --- .../syft/src/syft/capnp/recursive_serde.capnp | 1 + packages/syft/src/syft/serde/recursive.py | 35 +++++++++++++++---- packages/syft/src/syft/util/util.py | 8 ++--- 3 files changed, 33 insertions(+), 11 deletions(-) diff --git a/packages/syft/src/syft/capnp/recursive_serde.capnp b/packages/syft/src/syft/capnp/recursive_serde.capnp index 8f4b1b17953..be9aff03722 100644 --- a/packages/syft/src/syft/capnp/recursive_serde.capnp +++ b/packages/syft/src/syft/capnp/recursive_serde.capnp @@ -5,4 +5,5 @@ struct RecursiveSerde { fieldsData @1 :List(List(Data)); fullyQualifiedName @2 :Text; nonrecursiveBlob @3 :List(Data); + version @4 :Text; } diff --git a/packages/syft/src/syft/serde/recursive.py b/packages/syft/src/syft/serde/recursive.py index 47d151890ad..24b03a9601d 100644 --- a/packages/syft/src/syft/serde/recursive.py +++ b/packages/syft/src/syft/serde/recursive.py @@ -3,7 +3,7 @@ from enum import EnumMeta import sys import types -from typing import Any +from typing import Any, Dict from typing import Callable from typing import List from typing import Optional @@ -23,7 +23,7 @@ from ..util.util import index_syft_by_module_name from .capnp import get_capnp_schema -TYPE_BANK = {} +TYPE_BANK: Dict = {} recursive_scheme = get_capnp_schema("recursive_serde.capnp").RecursiveSerde # type: ignore @@ -94,11 +94,16 @@ def recursive_serde_register( alias_fqn = check_fqn_alias(cls) cls = type(cls) if not isinstance(cls, type) else cls - if hasattr(cls, "__version__"): - fqn = f"{cls.__module__}.{cls.__name__}.{cls.__version__}" + canonical_name = getattr(cls, "__canonical_name__", None) + if canonical_name: + fqn = f"{cls.__module__}.{cls.__canonical_name__}" else: fqn = f"{cls.__module__}.{cls.__name__}" + version = None + if canonical_name: + version = str(getattr(cls, "__version__", None)) + nonrecursive = bool(serialize and deserialize) _serialize = serialize if nonrecursive else rs_object2proto 
_deserialize = deserialize if nonrecursive else rs_proto2object @@ -152,7 +157,13 @@ def recursive_serde_register( attribute_types, ) - TYPE_BANK[fqn] = serde_attributes + if version: + if fqn in TYPE_BANK: + TYPE_BANK[fqn][version] = serde_attributes + else: + TYPE_BANK[fqn] = {version: serde_attributes} + else: + TYPE_BANK[fqn] = serde_attributes if isinstance(alias_fqn, tuple): for alias in alias_fqn: @@ -191,11 +202,15 @@ def rs_object2proto(self: Any, for_hashing: bool = False) -> _DynamicStructBuild msg = recursive_scheme.new_message() fqn = get_fully_qualified_name(self) + version = str(self.__version__) if hasattr(self, "__canonical_name__") else None if fqn not in TYPE_BANK: # third party raise Exception(f"{fqn} not in TYPE_BANK") msg.fullyQualifiedName = fqn + msg.version = version if version is not None else "" + + print("Message version", msg.version) ( nonrecursive, serialize, @@ -206,7 +221,9 @@ def rs_object2proto(self: Any, for_hashing: bool = False) -> _DynamicStructBuild hash_exclude_attrs, cls, attribute_types, - ) = TYPE_BANK[fqn] + ) = ( + TYPE_BANK[fqn] if version is None else TYPE_BANK[fqn][version] + ) if nonrecursive or is_type: if serialize is None: @@ -307,7 +324,11 @@ def rs_proto2object(proto: _DynamicStructBuilder) -> Any: hash_exclude_attrs, cls, attribute_types, - ) = TYPE_BANK[proto.fullyQualifiedName] + ) = ( + TYPE_BANK[proto.fullyQualifiedName][proto.version] + if proto.version + else TYPE_BANK[proto.fullyQualifiedName] + ) if class_type == type(None): # yes this looks stupid but it works and the opposite breaks diff --git a/packages/syft/src/syft/util/util.py b/packages/syft/src/syft/util/util.py index 1089ef7f102..6ee1fc16a46 100644 --- a/packages/syft/src/syft/util/util.py +++ b/packages/syft/src/syft/util/util.py @@ -144,12 +144,12 @@ def get_fully_qualified_name(obj: object) -> str: fqn = obj.__class__.__module__ try: - fqn += "." + obj.__class__.__name__ + if hasattr(obj, "__canonical_name__"): + fqn += "." 
+ obj.__canonical_name__ + else: + fqn += "." + obj.__class__.__name__ except Exception as e: error(f"Failed to get FQN: {e}") - - if hasattr(obj, "__version__"): - fqn += "." + str(obj.__version__) return fqn From 747505ae1be480c84096d1377ae4e7f4469ed7c8 Mon Sep 17 00:00:00 2001 From: Shubham Gupta Date: Mon, 11 Sep 2023 13:07:12 +0530 Subject: [PATCH 04/67] Revert "update serde to consider canonical name and version as FQN" This reverts commit 91c3e198a412732b44594d73bbf0987c98d14c72. --- .../syft/src/syft/capnp/recursive_serde.capnp | 1 - packages/syft/src/syft/serde/recursive.py | 35 ++++--------------- packages/syft/src/syft/util/util.py | 8 ++--- 3 files changed, 11 insertions(+), 33 deletions(-) diff --git a/packages/syft/src/syft/capnp/recursive_serde.capnp b/packages/syft/src/syft/capnp/recursive_serde.capnp index be9aff03722..8f4b1b17953 100644 --- a/packages/syft/src/syft/capnp/recursive_serde.capnp +++ b/packages/syft/src/syft/capnp/recursive_serde.capnp @@ -5,5 +5,4 @@ struct RecursiveSerde { fieldsData @1 :List(List(Data)); fullyQualifiedName @2 :Text; nonrecursiveBlob @3 :List(Data); - version @4 :Text; } diff --git a/packages/syft/src/syft/serde/recursive.py b/packages/syft/src/syft/serde/recursive.py index 24b03a9601d..47d151890ad 100644 --- a/packages/syft/src/syft/serde/recursive.py +++ b/packages/syft/src/syft/serde/recursive.py @@ -3,7 +3,7 @@ from enum import EnumMeta import sys import types -from typing import Any, Dict +from typing import Any from typing import Callable from typing import List from typing import Optional @@ -23,7 +23,7 @@ from ..util.util import index_syft_by_module_name from .capnp import get_capnp_schema -TYPE_BANK: Dict = {} +TYPE_BANK = {} recursive_scheme = get_capnp_schema("recursive_serde.capnp").RecursiveSerde # type: ignore @@ -94,16 +94,11 @@ def recursive_serde_register( alias_fqn = check_fqn_alias(cls) cls = type(cls) if not isinstance(cls, type) else cls - canonical_name = getattr(cls, "__canonical_name__", 
None) - if canonical_name: - fqn = f"{cls.__module__}.{cls.__canonical_name__}" + if hasattr(cls, "__version__"): + fqn = f"{cls.__module__}.{cls.__name__}.{cls.__version__}" else: fqn = f"{cls.__module__}.{cls.__name__}" - version = None - if canonical_name: - version = str(getattr(cls, "__version__", None)) - nonrecursive = bool(serialize and deserialize) _serialize = serialize if nonrecursive else rs_object2proto _deserialize = deserialize if nonrecursive else rs_proto2object @@ -157,13 +152,7 @@ def recursive_serde_register( attribute_types, ) - if version: - if fqn in TYPE_BANK: - TYPE_BANK[fqn][version] = serde_attributes - else: - TYPE_BANK[fqn] = {version: serde_attributes} - else: - TYPE_BANK[fqn] = serde_attributes + TYPE_BANK[fqn] = serde_attributes if isinstance(alias_fqn, tuple): for alias in alias_fqn: @@ -202,15 +191,11 @@ def rs_object2proto(self: Any, for_hashing: bool = False) -> _DynamicStructBuild msg = recursive_scheme.new_message() fqn = get_fully_qualified_name(self) - version = str(self.__version__) if hasattr(self, "__canonical_name__") else None if fqn not in TYPE_BANK: # third party raise Exception(f"{fqn} not in TYPE_BANK") msg.fullyQualifiedName = fqn - msg.version = version if version is not None else "" - - print("Message version", msg.version) ( nonrecursive, serialize, @@ -221,9 +206,7 @@ def rs_object2proto(self: Any, for_hashing: bool = False) -> _DynamicStructBuild hash_exclude_attrs, cls, attribute_types, - ) = ( - TYPE_BANK[fqn] if version is None else TYPE_BANK[fqn][version] - ) + ) = TYPE_BANK[fqn] if nonrecursive or is_type: if serialize is None: @@ -324,11 +307,7 @@ def rs_proto2object(proto: _DynamicStructBuilder) -> Any: hash_exclude_attrs, cls, attribute_types, - ) = ( - TYPE_BANK[proto.fullyQualifiedName][proto.version] - if proto.version - else TYPE_BANK[proto.fullyQualifiedName] - ) + ) = TYPE_BANK[proto.fullyQualifiedName] if class_type == type(None): # yes this looks stupid but it works and the opposite breaks diff 
--git a/packages/syft/src/syft/util/util.py b/packages/syft/src/syft/util/util.py index 6ee1fc16a46..1089ef7f102 100644 --- a/packages/syft/src/syft/util/util.py +++ b/packages/syft/src/syft/util/util.py @@ -144,12 +144,12 @@ def get_fully_qualified_name(obj: object) -> str: fqn = obj.__class__.__module__ try: - if hasattr(obj, "__canonical_name__"): - fqn += "." + obj.__canonical_name__ - else: - fqn += "." + obj.__class__.__name__ + fqn += "." + obj.__class__.__name__ except Exception as e: error(f"Failed to get FQN: {e}") + + if hasattr(obj, "__version__"): + fqn += "." + str(obj.__version__) return fqn From 770feba8d44b331aa2e4c2fbb6575910c7a3ee73 Mon Sep 17 00:00:00 2001 From: Shubham Gupta Date: Mon, 11 Sep 2023 13:15:33 +0530 Subject: [PATCH 05/67] revert to use only class name as FQN --- packages/syft/src/syft/serde/recursive.py | 9 +++++---- packages/syft/src/syft/util/util.py | 3 --- 2 files changed, 5 insertions(+), 7 deletions(-) diff --git a/packages/syft/src/syft/serde/recursive.py b/packages/syft/src/syft/serde/recursive.py index 47d151890ad..19ac5170a21 100644 --- a/packages/syft/src/syft/serde/recursive.py +++ b/packages/syft/src/syft/serde/recursive.py @@ -94,10 +94,7 @@ def recursive_serde_register( alias_fqn = check_fqn_alias(cls) cls = type(cls) if not isinstance(cls, type) else cls - if hasattr(cls, "__version__"): - fqn = f"{cls.__module__}.{cls.__name__}.{cls.__version__}" - else: - fqn = f"{cls.__module__}.{cls.__name__}" + fqn = f"{cls.__module__}.{cls.__name__}" nonrecursive = bool(serialize and deserialize) _serialize = serialize if nonrecursive else rs_object2proto @@ -138,6 +135,7 @@ def recursive_serde_register( attributes = set(attribute_list) if attribute_list else None attribute_types = get_types(cls, attributes) serde_overrides = getattr(cls, "__serde_overrides__", {}) + version = getattr(cls, "__version__", None) # without fqn duplicate class names overwrite serde_attributes = ( @@ -150,6 +148,7 @@ def recursive_serde_register( 
hash_exclude_attrs, cls, attribute_types, + version, ) TYPE_BANK[fqn] = serde_attributes @@ -206,6 +205,7 @@ def rs_object2proto(self: Any, for_hashing: bool = False) -> _DynamicStructBuild hash_exclude_attrs, cls, attribute_types, + version, ) = TYPE_BANK[fqn] if nonrecursive or is_type: @@ -307,6 +307,7 @@ def rs_proto2object(proto: _DynamicStructBuilder) -> Any: hash_exclude_attrs, cls, attribute_types, + version, ) = TYPE_BANK[proto.fullyQualifiedName] if class_type == type(None): diff --git a/packages/syft/src/syft/util/util.py b/packages/syft/src/syft/util/util.py index 1089ef7f102..8f76f6de5be 100644 --- a/packages/syft/src/syft/util/util.py +++ b/packages/syft/src/syft/util/util.py @@ -147,9 +147,6 @@ def get_fully_qualified_name(obj: object) -> str: fqn += "." + obj.__class__.__name__ except Exception as e: error(f"Failed to get FQN: {e}") - - if hasattr(obj, "__version__"): - fqn += "." + str(obj.__version__) return fqn From ee5995f5ec065d213e8edf1d6adffddca85a5491 Mon Sep 17 00:00:00 2001 From: Shubham Gupta Date: Mon, 11 Sep 2023 13:54:18 +0530 Subject: [PATCH 06/67] Added a syft migration registry Co-authored-by: Peter Chung --- .../syft/src/syft/types/syft_migration.py | 54 +++++++++++++++++++ 1 file changed, 54 insertions(+) create mode 100644 packages/syft/src/syft/types/syft_migration.py diff --git a/packages/syft/src/syft/types/syft_migration.py b/packages/syft/src/syft/types/syft_migration.py new file mode 100644 index 00000000000..ba252c12017 --- /dev/null +++ b/packages/syft/src/syft/types/syft_migration.py @@ -0,0 +1,54 @@ +# stdlib +from typing import Callable +from typing import Dict +from typing import Type + +# relative +from ..util.autoreload import autoreload_enabled +from .syft_object import SyftObject + + +class SyftMigrationRegistry: + __object_version_registry__: Dict[str, Dict[int : Type["SyftObject"]]] = {} + __object_transform_registry__: Dict[str, Dict[str, Callable]] = {} + + @classmethod + def register_version(cls, klass: 
Type[SyftObject]) -> None: + klass = type(klass) if not isinstance(klass, type) else cls + fqn = f"{klass.__module__}.{klass.__name__}" + klass_version = klass.__version__ + + if hasattr(klass, "__canonical_name__") and hasattr(klass, "__version__"): + mapping_string = klass.__canonical_name__ + + if ( + mapping_string in cls.__object_version_registry__ + and not autoreload_enabled() + ): + versions = klass.__object_version_registry__[mapping_string] + versions[klass_version] = fqn + else: + # only if the cls has not been registered do we want to register it + cls.__object_version_registry__[mapping_string] = {klass_version: fqn} + + @classmethod + def register_transform( + cls, klass_type: str, version_from: int, version_to: int, method: Callable + ) -> None: + if klass_type not in cls.__object_version_registry__: + raise Exception(f"{klass_type} is not yet registered.") + + available_versions = cls.__object_version_registry__[klass_type] + + versions_exists = ( + version_from in available_versions and version_to in available_versions + ) + + if versions_exists: + mapping_string = f"{version_from}x{version_to}" + cls.__object_transform_registry__[klass_type][mapping_string] = method + + raise Exception( + f"Available versions for {klass_type} are: {available_versions}." 
+ f"You're trying to add a transform from version: {version_from} to version: {version_to}" + ) From 62c513183bd05b188ce2d204f4ad09fb9ef3d757 Mon Sep 17 00:00:00 2001 From: Shubham Gupta Date: Tue, 12 Sep 2023 12:13:03 +0530 Subject: [PATCH 07/67] added SyftMigrationRegistry class added migrate decorator to transform data from and to different versions --- packages/syft/src/syft/serde/recursive.py | 5 ++ .../syft/src/syft/types/syft_migration.py | 65 +++++++++++++++++-- 2 files changed, 64 insertions(+), 6 deletions(-) diff --git a/packages/syft/src/syft/serde/recursive.py b/packages/syft/src/syft/serde/recursive.py index 19ac5170a21..9d598eb4fd3 100644 --- a/packages/syft/src/syft/serde/recursive.py +++ b/packages/syft/src/syft/serde/recursive.py @@ -19,6 +19,7 @@ import syft as sy # relative +from ..types.syft_migration import SyftMigrationRegistry from ..util.util import get_fully_qualified_name from ..util.util import index_syft_by_module_name from .capnp import get_capnp_schema @@ -153,6 +154,10 @@ def recursive_serde_register( TYPE_BANK[fqn] = serde_attributes + # Register Version for the class object + if hasattr(cls, "__canonical_name__") and hasattr(cls, "__version__"): + SyftMigrationRegistry.register_version(cls) + if isinstance(alias_fqn, tuple): for alias in alias_fqn: TYPE_BANK[alias] = serde_attributes diff --git a/packages/syft/src/syft/types/syft_migration.py b/packages/syft/src/syft/types/syft_migration.py index ba252c12017..534953f42ac 100644 --- a/packages/syft/src/syft/types/syft_migration.py +++ b/packages/syft/src/syft/types/syft_migration.py @@ -1,11 +1,15 @@ # stdlib from typing import Callable from typing import Dict +from typing import Optional from typing import Type +from typing import Union # relative from ..util.autoreload import autoreload_enabled from .syft_object import SyftObject +from .transforms import generate_transform_wrapper +from .transforms import validate_klass_and_version class SyftMigrationRegistry: @@ -33,12 +37,12 @@ 
def register_version(cls, klass: Type[SyftObject]) -> None: @classmethod def register_transform( - cls, klass_type: str, version_from: int, version_to: int, method: Callable + cls, klass_type_str: str, version_from: int, version_to: int, method: Callable ) -> None: - if klass_type not in cls.__object_version_registry__: - raise Exception(f"{klass_type} is not yet registered.") + if klass_type_str not in cls.__object_version_registry__: + raise Exception(f"{klass_type_str} is not yet registered.") - available_versions = cls.__object_version_registry__[klass_type] + available_versions = cls.__object_version_registry__[klass_type_str] versions_exists = ( version_from in available_versions and version_to in available_versions @@ -46,9 +50,58 @@ def register_transform( if versions_exists: mapping_string = f"{version_from}x{version_to}" - cls.__object_transform_registry__[klass_type][mapping_string] = method + cls.__object_transform_registry__[klass_type_str][mapping_string] = method raise Exception( - f"Available versions for {klass_type} are: {available_versions}." + f"Available versions for {klass_type_str} are: {available_versions}." f"You're trying to add a transform from version: {version_from} to version: {version_to}" ) + + +def migrate( + klass_from: Union[type, str], + klass_to: Union[type, str], + version_from: Optional[int] = None, + version_to: Optional[int] = None, +) -> Callable: + ( + klass_from_str, + version_from, + klass_to_str, + version_to, + ) = validate_klass_and_version( + klass_from=klass_from, + version_from=version_from, + klass_to=klass_to, + version_to=version_to, + ) + + if klass_from_str != klass_to_str: + raise Exception( + "Migration can only be performed across classes with same canonical name." + f"Provided args: klass_from: {klass_from_str}, klass_to: {klass_to_str}" + ) + + if version_from is None or version_to is None: + raise Exception( + "Version information missing at either of the classes." 
+ f"{klass_from_str} has version: {version_from}, {klass_to_str} has version: {version_to}" + ) + + def decorator(function: Callable): + transforms = function() + + wrapper = generate_transform_wrapper( + klass_from=klass_from, klass_to=klass_to, transforms=transforms + ) + + SyftMigrationRegistry.register_transform( + klass_type_str=klass_from_str, + version_from=version_from, + version_to=version_to, + method=wrapper, + ) + + return function + + return decorator From 2c87a48ad852d3f7e14b308f05ad30549cf70904 Mon Sep 17 00:00:00 2001 From: Shubham Gupta Date: Tue, 12 Sep 2023 14:17:21 +0530 Subject: [PATCH 08/67] - add syft object metadata class to store object metadata information - Added a stash layer for basic CRUD - Added a object search API to search object info by name Co-authored-by: Kien Dang Co-authored-by: Peter Chung --- packages/syft/src/syft/serde/recursive.py | 7 +- .../service/object_search/object_metadata.py | 84 +++++++++++++++++++ .../object_search/object_service_service.py | 44 ++++++++++ 3 files changed, 131 insertions(+), 4 deletions(-) create mode 100644 packages/syft/src/syft/service/object_search/object_metadata.py create mode 100644 packages/syft/src/syft/service/object_search/object_service_service.py diff --git a/packages/syft/src/syft/serde/recursive.py b/packages/syft/src/syft/serde/recursive.py index 9d598eb4fd3..7bd549e0802 100644 --- a/packages/syft/src/syft/serde/recursive.py +++ b/packages/syft/src/syft/serde/recursive.py @@ -19,7 +19,6 @@ import syft as sy # relative -from ..types.syft_migration import SyftMigrationRegistry from ..util.util import get_fully_qualified_name from ..util.util import index_syft_by_module_name from .capnp import get_capnp_schema @@ -154,9 +153,9 @@ def recursive_serde_register( TYPE_BANK[fqn] = serde_attributes - # Register Version for the class object - if hasattr(cls, "__canonical_name__") and hasattr(cls, "__version__"): - SyftMigrationRegistry.register_version(cls) + # # Register Version for the 
class object + # if hasattr(cls, "__canonical_name__") and hasattr(cls, "__version__"): + # SyftMigrationRegistry.register_version(cls) if isinstance(alias_fqn, tuple): for alias in alias_fqn: diff --git a/packages/syft/src/syft/service/object_search/object_metadata.py b/packages/syft/src/syft/service/object_search/object_metadata.py new file mode 100644 index 00000000000..f7ab83895a4 --- /dev/null +++ b/packages/syft/src/syft/service/object_search/object_metadata.py @@ -0,0 +1,84 @@ +# stdlib +from typing import List +from typing import Optional + +# third party +from result import Result + +# relative +from ...node.credentials import SyftVerifyKey +from ...serde import serializable +from ...store.document_store import BaseStash +from ...store.document_store import DocumentStore +from ...store.document_store import PartitionKey +from ...store.document_store import PartitionSettings +from ...types.syft_object import PartialSyftObject +from ...types.syft_object import SYFT_OBJECT_VERSION_1 +from ...types.syft_object import SyftBaseObject +from ..action.action_permissions import ActionObjectPermission +from ..user.user import User + + +@serializable() +class SyftObjectMetadata(PartialSyftObject): + __canonical_name__ = "SyftObjectMetadata" + __version__ = SYFT_OBJECT_VERSION_1 + + canonical_name: str + klass_version: int + object_hash: str + + @classmethod + def from_klass(cls, klass: SyftBaseObject): + object_hash = cls.__generate_hash_(klass=klass) + return SyftObjectMetadata( + canonical_name=klass.__canonical_name__, + klass_version=klass.__version__, + object_hash=object_hash, + ) + + @staticmethod + def __generate_hash_(klass: SyftBaseObject) -> str: + unique_attrs = getattr(klass, "__attr_unique__", ()) + searchable_attrs = getattr(klass, "__attr_searchable__", ()) + return hash( + tuple( + klass.__fields__.values(), + tuple(unique_attrs), + tuple(searchable_attrs), + ) + ) + + +KlassNamePartitionKey = PartitionKey(key="canonical_name", type_=str) + + +class 
SyftObjectMetadataStash(BaseStash): + object_type = SyftObjectMetadata + settings: PartitionSettings = PartitionSettings( + name=User.__canonical_name__, + object_type=SyftObjectMetadata, + ) + + def __init__(self, store: DocumentStore) -> None: + super().__init__(store=store) + + def set( + self, + credentials: SyftVerifyKey, + syft_object_metadata: SyftObjectMetadata, + add_permissions: Optional[List[ActionObjectPermission]] = None, + ) -> Result[SyftObjectMetadata, str]: + res = self.check_type(syft_object_metadata, self.object_type) + # we dont use and_then logic here as it is hard because of the order of the arguments + if res.is_err(): + return res + return super().set( + credentials=credentials, obj=res.ok(), add_permissions=add_permissions + ) + + def get_by_name( + self, canonical_name: str, credentials: SyftVerifyKey + ) -> Result[SyftObjectMetadata, str]: + qks = KlassNamePartitionKey.with_obj(canonical_name) + return self.query_one(credentials=credentials, qks=qks) diff --git a/packages/syft/src/syft/service/object_search/object_service_service.py b/packages/syft/src/syft/service/object_search/object_service_service.py new file mode 100644 index 00000000000..ac7324addc6 --- /dev/null +++ b/packages/syft/src/syft/service/object_search/object_service_service.py @@ -0,0 +1,44 @@ +# stdlib +from typing import Union + +# relative +from ...serde.serializable import serializable +from ...store.document_store import DocumentStore +from ..context import AuthedServiceContext +from ..response import SyftError +from ..service import AbstractService +from ..service import service_method +from .object_metadata import SyftObjectMetadata +from .object_metadata import SyftObjectMetadataStash + + +@serializable() +class ObjectSearchService(AbstractService): + store: DocumentStore + stash: SyftObjectMetadata + + def __init__(self, store: DocumentStore) -> None: + self.store = store + self.stash: SyftObjectMetadataStash = SyftObjectMetadataStash(store=store) + + 
@service_method(path="object_metadata", name="search") + def search( + self, context: AuthedServiceContext, canonical_name: str + ) -> Union[SyftObjectMetadata, SyftError]: + """Search for the metadata for an object.""" + + result = self.stash.get_by_name( + canonical_name=canonical_name, credentials=context.credentials + ) + + if result.is_err(): + return SyftError(message=f"{result.err()}") + + result = result.ok() + + if result is None: + return SyftError( + message=f"No metadata exists for canonical name: {canonical_name}" + ) + + return result From 1a4c97df3865aee2d4a72478f842163c5e51658b Mon Sep 17 00:00:00 2001 From: Shubham Gupta Date: Wed, 13 Sep 2023 17:40:40 +0530 Subject: [PATCH 09/67] add migration transforms for NodeMetadata rename variables to store transfroms and version in SyftMigrationRegistry fix register_transform move logic of register version to __init_subclass__ --- packages/syft/src/syft/serde/recursive.py | 4 -- .../src/syft/service/metadata/migrations.py | 21 +++++++++ .../syft/service/metadata/node_metadata.py | 5 ++- .../syft/src/syft/types/syft_migration.py | 45 ++++++++++--------- 4 files changed, 49 insertions(+), 26 deletions(-) create mode 100644 packages/syft/src/syft/service/metadata/migrations.py diff --git a/packages/syft/src/syft/serde/recursive.py b/packages/syft/src/syft/serde/recursive.py index 7bd549e0802..19ac5170a21 100644 --- a/packages/syft/src/syft/serde/recursive.py +++ b/packages/syft/src/syft/serde/recursive.py @@ -153,10 +153,6 @@ def recursive_serde_register( TYPE_BANK[fqn] = serde_attributes - # # Register Version for the class object - # if hasattr(cls, "__canonical_name__") and hasattr(cls, "__version__"): - # SyftMigrationRegistry.register_version(cls) - if isinstance(alias_fqn, tuple): for alias in alias_fqn: TYPE_BANK[alias] = serde_attributes diff --git a/packages/syft/src/syft/service/metadata/migrations.py b/packages/syft/src/syft/service/metadata/migrations.py new file mode 100644 index 
00000000000..58d09021eb2 --- /dev/null +++ b/packages/syft/src/syft/service/metadata/migrations.py @@ -0,0 +1,21 @@ +# relative +from ...types.syft_migration import migrate +from ...types.transforms import rename +from .node_metadata import NodeMetadata +from .node_metadata import NodeMetadataV2 + + +@migrate(NodeMetadata, NodeMetadataV2) +def upgrade_metadata_v1_to_v2(): + return [ + rename("highest_object_version", "highest_version"), + rename("lowest_object_version", "lowest_version"), + ] + + +@migrate(NodeMetadataV2, NodeMetadata) +def downgrade_metadata_v2_to_v1(): + return [ + rename("highest_version", "highest_object_version"), + rename("lowest_version", "lowest_object_version"), + ] diff --git a/packages/syft/src/syft/service/metadata/node_metadata.py b/packages/syft/src/syft/service/metadata/node_metadata.py index a27fe8a704e..0c4adddff0c 100644 --- a/packages/syft/src/syft/service/metadata/node_metadata.py +++ b/packages/syft/src/syft/service/metadata/node_metadata.py @@ -14,6 +14,7 @@ from ...abstract_node import NodeType from ...node.credentials import SyftVerifyKey from ...serde.serializable import serializable +from ...types.syft_migration import SyftMigrationRegistry from ...types.syft_object import SYFT_OBJECT_VERSION_1 from ...types.syft_object import SYFT_OBJECT_VERSION_2 from ...types.syft_object import StorableObjectType @@ -61,7 +62,7 @@ class NodeMetadataUpdate(SyftObject): @serializable() -class NodeMetadata(SyftObject): +class NodeMetadata(SyftObject, SyftMigrationRegistry): __canonical_name__ = "NodeMetadata" __version__ = SYFT_OBJECT_VERSION_1 @@ -90,7 +91,7 @@ def check_version(self, client_version: str) -> bool: @serializable() -class NodeMetadataV2(SyftObject): +class NodeMetadataV2(SyftObject, SyftMigrationRegistry): __canonical_name__ = "NodeMetadata" __version__ = SYFT_OBJECT_VERSION_2 diff --git a/packages/syft/src/syft/types/syft_migration.py b/packages/syft/src/syft/types/syft_migration.py index 534953f42ac..b9e31d610f5 100644 
--- a/packages/syft/src/syft/types/syft_migration.py +++ b/packages/syft/src/syft/types/syft_migration.py @@ -1,48 +1,49 @@ # stdlib +from typing import Any from typing import Callable from typing import Dict from typing import Optional -from typing import Type from typing import Union # relative from ..util.autoreload import autoreload_enabled -from .syft_object import SyftObject from .transforms import generate_transform_wrapper from .transforms import validate_klass_and_version class SyftMigrationRegistry: - __object_version_registry__: Dict[str, Dict[int : Type["SyftObject"]]] = {} - __object_transform_registry__: Dict[str, Dict[str, Callable]] = {} + __migration_version_registry__: Dict[str, Dict[int, str]] = {} + __migration_transform_registry__: Dict[str, Dict[str, Callable]] = {} - @classmethod - def register_version(cls, klass: Type[SyftObject]) -> None: - klass = type(klass) if not isinstance(klass, type) else cls - fqn = f"{klass.__module__}.{klass.__name__}" - klass_version = klass.__version__ + def __init_subclass__(cls, **kwargs: Any) -> None: + super().__init_subclass__(**kwargs) + klass = type(cls) if not isinstance(cls, type) else cls if hasattr(klass, "__canonical_name__") and hasattr(klass, "__version__"): mapping_string = klass.__canonical_name__ + klass_version = cls.__version__ + fqn = f"{cls.__module__}.{cls.__name__}" if ( - mapping_string in cls.__object_version_registry__ + mapping_string in cls.__migration_version_registry__ and not autoreload_enabled() ): - versions = klass.__object_version_registry__[mapping_string] + versions = cls.__migration_version_registry__[mapping_string] versions[klass_version] = fqn else: # only if the cls has not been registered do we want to register it - cls.__object_version_registry__[mapping_string] = {klass_version: fqn} + cls.__migration_version_registry__[mapping_string] = { + klass_version: fqn + } @classmethod def register_transform( cls, klass_type_str: str, version_from: int, version_to: int, 
method: Callable ) -> None: - if klass_type_str not in cls.__object_version_registry__: + if klass_type_str not in cls.__migration_version_registry__: raise Exception(f"{klass_type_str} is not yet registered.") - available_versions = cls.__object_version_registry__[klass_type_str] + available_versions = cls.__migration_version_registry__[klass_type_str] versions_exists = ( version_from in available_versions and version_to in available_versions @@ -50,12 +51,16 @@ def register_transform( if versions_exists: mapping_string = f"{version_from}x{version_to}" - cls.__object_transform_registry__[klass_type_str][mapping_string] = method - - raise Exception( - f"Available versions for {klass_type_str} are: {available_versions}." - f"You're trying to add a transform from version: {version_from} to version: {version_to}" - ) + if klass_type_str not in cls.__migration_transform_registry__: + cls.__migration_transform_registry__[klass_type_str] = {} + cls.__migration_transform_registry__[klass_type_str][ + mapping_string + ] = method + else: + raise Exception( + f"Available versions for {klass_type_str} are: {available_versions}." 
+ f"You're trying to add a transform from version: {version_from} to version: {version_to}" + ) def migrate( From 32d645019656714d4c52c228576f99dcbb1ba46e Mon Sep 17 00:00:00 2001 From: Shubham Gupta Date: Wed, 13 Sep 2023 18:14:54 +0530 Subject: [PATCH 10/67] create a SyftObject subclass that inherits SyftMigrationRegistry add get_migration method --- .../syft/service/metadata/node_metadata.py | 6 +-- .../syft/src/syft/types/syft_migration.py | 46 +++++++++++++++++++ 2 files changed, 49 insertions(+), 3 deletions(-) diff --git a/packages/syft/src/syft/service/metadata/node_metadata.py b/packages/syft/src/syft/service/metadata/node_metadata.py index 0c4adddff0c..48ef0364857 100644 --- a/packages/syft/src/syft/service/metadata/node_metadata.py +++ b/packages/syft/src/syft/service/metadata/node_metadata.py @@ -14,7 +14,7 @@ from ...abstract_node import NodeType from ...node.credentials import SyftVerifyKey from ...serde.serializable import serializable -from ...types.syft_migration import SyftMigrationRegistry +from ...types.syft_migration import SyftObjectTable from ...types.syft_object import SYFT_OBJECT_VERSION_1 from ...types.syft_object import SYFT_OBJECT_VERSION_2 from ...types.syft_object import StorableObjectType @@ -62,7 +62,7 @@ class NodeMetadataUpdate(SyftObject): @serializable() -class NodeMetadata(SyftObject, SyftMigrationRegistry): +class NodeMetadata(SyftObjectTable): __canonical_name__ = "NodeMetadata" __version__ = SYFT_OBJECT_VERSION_1 @@ -91,7 +91,7 @@ def check_version(self, client_version: str) -> bool: @serializable() -class NodeMetadataV2(SyftObject, SyftMigrationRegistry): +class NodeMetadataV2(SyftObjectTable): __canonical_name__ = "NodeMetadata" __version__ = SYFT_OBJECT_VERSION_2 diff --git a/packages/syft/src/syft/types/syft_migration.py b/packages/syft/src/syft/types/syft_migration.py index b9e31d610f5..f1ec8dedb04 100644 --- a/packages/syft/src/syft/types/syft_migration.py +++ b/packages/syft/src/syft/types/syft_migration.py @@ -3,10 
+3,15 @@ from typing import Callable from typing import Dict from typing import Optional +from typing import Type from typing import Union # relative from ..util.autoreload import autoreload_enabled +from .syft_object import Context +from .syft_object import SYFT_OBJECT_VERSION_1 +from .syft_object import SyftBaseObject +from .syft_object import SyftObject from .transforms import generate_transform_wrapper from .transforms import validate_klass_and_version @@ -62,6 +67,34 @@ def register_transform( f"You're trying to add a transform from version: {version_from} to version: {version_to}" ) + @classmethod + def get_migration( + cls, type_from: Type[SyftObject], type_to: Type[SyftObject] + ) -> Callable: + for type_from_mro in type_from.mro(): + if issubclass(type_from_mro, SyftBaseObject): + klass_from = type_from_mro.__canonical_name__ + version_from = type_from_mro.__version__ + + for type_to_mro in type_to.mro(): + if issubclass(type_to_mro, SyftBaseObject): + klass_to = type_to_mro.__canonical_name__ + version_to = type_to_mro.__version__ + + if klass_from == klass_to: + mapping_string = f"{version_from}x{version_to}" + if ( + mapping_string + in cls.__migration_transform_registry__[klass_from] + ): + return cls.__migration_transform_registry__[klass_from][ + mapping_string + ] + + raise Exception( + f"No migration found for: {type_from} to {type_to} in the migration registry." 
+ ) + def migrate( klass_from: Union[type, str], @@ -110,3 +143,16 @@ def decorator(function: Callable): return function return decorator + + +class SyftObjectTable(SyftObject, SyftMigrationRegistry): + """Syft Object which are stored in DocumentStore.""" + + __canonical_name__ = "SyftObjectTable" + __version__ = SYFT_OBJECT_VERSION_1 + + def migrate_to(self, projection: type, context: Optional[Context] = None) -> Any: + migration_transform = SyftMigrationRegistry.get_migration( + type(self), projection + ) + return migration_transform(self, context) From e5a32160c52214cbb60bae14ff0539971e91e6a3 Mon Sep 17 00:00:00 2001 From: Shubham Gupta Date: Thu, 14 Sep 2023 18:21:15 +0530 Subject: [PATCH 11/67] - remove SyftObjectTable class - move SyftMigrationRegistry to SyftObject - move migrate_to method to SyftObject - add method to get migration transform for given version --- .../syft/service/metadata/node_metadata.py | 5 +- packages/syft/src/syft/types/base.py | 2 + .../syft/src/syft/types/syft_migration.py | 102 +---------------- packages/syft/src/syft/types/syft_object.py | 108 +++++++++++++++++- 4 files changed, 109 insertions(+), 108 deletions(-) diff --git a/packages/syft/src/syft/service/metadata/node_metadata.py b/packages/syft/src/syft/service/metadata/node_metadata.py index 48ef0364857..a27fe8a704e 100644 --- a/packages/syft/src/syft/service/metadata/node_metadata.py +++ b/packages/syft/src/syft/service/metadata/node_metadata.py @@ -14,7 +14,6 @@ from ...abstract_node import NodeType from ...node.credentials import SyftVerifyKey from ...serde.serializable import serializable -from ...types.syft_migration import SyftObjectTable from ...types.syft_object import SYFT_OBJECT_VERSION_1 from ...types.syft_object import SYFT_OBJECT_VERSION_2 from ...types.syft_object import StorableObjectType @@ -62,7 +61,7 @@ class NodeMetadataUpdate(SyftObject): @serializable() -class NodeMetadata(SyftObjectTable): +class NodeMetadata(SyftObject): __canonical_name__ = 
"NodeMetadata" __version__ = SYFT_OBJECT_VERSION_1 @@ -91,7 +90,7 @@ def check_version(self, client_version: str) -> bool: @serializable() -class NodeMetadataV2(SyftObjectTable): +class NodeMetadataV2(SyftObject): __canonical_name__ = "NodeMetadata" __version__ = SYFT_OBJECT_VERSION_2 diff --git a/packages/syft/src/syft/types/base.py b/packages/syft/src/syft/types/base.py index de182c0f71a..bb5160aebb0 100644 --- a/packages/syft/src/syft/types/base.py +++ b/packages/syft/src/syft/types/base.py @@ -1,3 +1,5 @@ +# stdlib + # third party from pydantic import BaseModel diff --git a/packages/syft/src/syft/types/syft_migration.py b/packages/syft/src/syft/types/syft_migration.py index f1ec8dedb04..86f99320d8e 100644 --- a/packages/syft/src/syft/types/syft_migration.py +++ b/packages/syft/src/syft/types/syft_migration.py @@ -1,101 +1,14 @@ # stdlib -from typing import Any from typing import Callable -from typing import Dict from typing import Optional -from typing import Type from typing import Union # relative -from ..util.autoreload import autoreload_enabled -from .syft_object import Context -from .syft_object import SYFT_OBJECT_VERSION_1 -from .syft_object import SyftBaseObject -from .syft_object import SyftObject +from .syft_object import SyftMigrationRegistry from .transforms import generate_transform_wrapper from .transforms import validate_klass_and_version -class SyftMigrationRegistry: - __migration_version_registry__: Dict[str, Dict[int, str]] = {} - __migration_transform_registry__: Dict[str, Dict[str, Callable]] = {} - - def __init_subclass__(cls, **kwargs: Any) -> None: - super().__init_subclass__(**kwargs) - klass = type(cls) if not isinstance(cls, type) else cls - - if hasattr(klass, "__canonical_name__") and hasattr(klass, "__version__"): - mapping_string = klass.__canonical_name__ - klass_version = cls.__version__ - fqn = f"{cls.__module__}.{cls.__name__}" - - if ( - mapping_string in cls.__migration_version_registry__ - and not autoreload_enabled() - ): - 
versions = cls.__migration_version_registry__[mapping_string] - versions[klass_version] = fqn - else: - # only if the cls has not been registered do we want to register it - cls.__migration_version_registry__[mapping_string] = { - klass_version: fqn - } - - @classmethod - def register_transform( - cls, klass_type_str: str, version_from: int, version_to: int, method: Callable - ) -> None: - if klass_type_str not in cls.__migration_version_registry__: - raise Exception(f"{klass_type_str} is not yet registered.") - - available_versions = cls.__migration_version_registry__[klass_type_str] - - versions_exists = ( - version_from in available_versions and version_to in available_versions - ) - - if versions_exists: - mapping_string = f"{version_from}x{version_to}" - if klass_type_str not in cls.__migration_transform_registry__: - cls.__migration_transform_registry__[klass_type_str] = {} - cls.__migration_transform_registry__[klass_type_str][ - mapping_string - ] = method - else: - raise Exception( - f"Available versions for {klass_type_str} are: {available_versions}." - f"You're trying to add a transform from version: {version_from} to version: {version_to}" - ) - - @classmethod - def get_migration( - cls, type_from: Type[SyftObject], type_to: Type[SyftObject] - ) -> Callable: - for type_from_mro in type_from.mro(): - if issubclass(type_from_mro, SyftBaseObject): - klass_from = type_from_mro.__canonical_name__ - version_from = type_from_mro.__version__ - - for type_to_mro in type_to.mro(): - if issubclass(type_to_mro, SyftBaseObject): - klass_to = type_to_mro.__canonical_name__ - version_to = type_to_mro.__version__ - - if klass_from == klass_to: - mapping_string = f"{version_from}x{version_to}" - if ( - mapping_string - in cls.__migration_transform_registry__[klass_from] - ): - return cls.__migration_transform_registry__[klass_from][ - mapping_string - ] - - raise Exception( - f"No migration found for: {type_from} to {type_to} in the migration registry." 
- ) - - def migrate( klass_from: Union[type, str], klass_to: Union[type, str], @@ -143,16 +56,3 @@ def decorator(function: Callable): return function return decorator - - -class SyftObjectTable(SyftObject, SyftMigrationRegistry): - """Syft Object which are stored in DocumentStore.""" - - __canonical_name__ = "SyftObjectTable" - __version__ = SYFT_OBJECT_VERSION_1 - - def migrate_to(self, projection: type, context: Optional[Context] = None) -> Any: - migration_transform = SyftMigrationRegistry.get_migration( - type(self), projection - ) - return migration_transform(self, context) diff --git a/packages/syft/src/syft/types/syft_object.py b/packages/syft/src/syft/types/syft_object.py index 74fe7ee5f2e..79d584e6c27 100644 --- a/packages/syft/src/syft/types/syft_object.py +++ b/packages/syft/src/syft/types/syft_object.py @@ -23,7 +23,6 @@ # third party import pandas as pd import pydantic -from pydantic import BaseModel from pydantic import EmailStr from pydantic.fields import Undefined from result import OkErr @@ -31,8 +30,8 @@ # relative from ..node.credentials import SyftVerifyKey +from ..serde import serialize from ..serde.recursive_primitives import recursive_serde_register_type -from ..serde.serialize import _serialize as serialize from ..util.autoreload import autoreload_enabled from ..util.markdown import as_markdown_python_code from ..util.notebook_ui.notebook_addons import create_table_template @@ -80,7 +79,7 @@ def hash(self) -> str: return self.__sha256__().hex() -class SyftBaseObject(BaseModel, SyftHashableObject): +class SyftBaseObject(pydantic.BaseModel, SyftHashableObject): class Config: arbitrary_types_allowed = True @@ -179,10 +178,105 @@ def get_transform( ) +class SyftMigrationRegistry: + __migration_version_registry__: Dict[str, Dict[int, str]] = {} + __migration_transform_registry__: Dict[str, Dict[str, Callable]] = {} + + def __init_subclass__(cls, **kwargs: Any) -> None: + super().__init_subclass__(**kwargs) + klass = type(cls) if not 
isinstance(cls, type) else cls + + if hasattr(klass, "__canonical_name__") and hasattr(klass, "__version__"): + mapping_string = klass.__canonical_name__ + klass_version = cls.__version__ + fqn = f"{cls.__module__}.{cls.__name__}" + + if ( + mapping_string in cls.__migration_version_registry__ + and not autoreload_enabled() + ): + versions = cls.__migration_version_registry__[mapping_string] + versions[klass_version] = fqn + else: + # only if the cls has not been registered do we want to register it + cls.__migration_version_registry__[mapping_string] = { + klass_version: fqn + } + + @classmethod + def register_transform( + cls, klass_type_str: str, version_from: int, version_to: int, method: Callable + ) -> None: + if klass_type_str not in cls.__migration_version_registry__: + raise Exception(f"{klass_type_str} is not yet registered.") + + available_versions = cls.__migration_version_registry__[klass_type_str] + + versions_exists = ( + version_from in available_versions and version_to in available_versions + ) + + if versions_exists: + mapping_string = f"{version_from}x{version_to}" + if klass_type_str not in cls.__migration_transform_registry__: + cls.__migration_transform_registry__[klass_type_str] = {} + cls.__migration_transform_registry__[klass_type_str][ + mapping_string + ] = method + else: + raise Exception( + f"Available versions for {klass_type_str} are: {available_versions}." 
+ f"You're trying to add a transform from version: {version_from} to version: {version_to}" + ) + + @classmethod + def get_migration( + cls, type_from: Type[SyftBaseObject], type_to: Type[SyftBaseObject] + ) -> Callable: + for type_from_mro in type_from.mro(): + if issubclass(type_from_mro, SyftBaseObject): + klass_from = type_from_mro.__canonical_name__ + version_from = type_from_mro.__version__ + + for type_to_mro in type_to.mro(): + if issubclass(type_to_mro, SyftBaseObject): + klass_to = type_to_mro.__canonical_name__ + version_to = type_to_mro.__version__ + + if klass_from == klass_to: + mapping_string = f"{version_from}x{version_to}" + if ( + mapping_string + in cls.__migration_transform_registry__[klass_from] + ): + return cls.__migration_transform_registry__[klass_from][ + mapping_string + ] + + @classmethod + def get_migration_for_version( + cls, type_from: Type[SyftBaseObject], version_to: int + ) -> Callable: + for type_from_mro in type_from.mro(): + if issubclass(type_from_mro, SyftBaseObject): + klass_from = type_from_mro.__canonical_name__ + version_from = type_from_mro.__version__ + mapping_string = f"{version_from}x{version_to}" + if mapping_string in cls.__migration_transform_registry__[klass_from]: + return cls.__migration_transform_registry__[klass_from][ + mapping_string + ] + + raise Exception( + f"No migration found for class type: {type_from} to " + "version: {version_to} in the migration registry." 
+ ) + + print_type_cache = defaultdict(list) -class SyftObject(SyftBaseObject, SyftObjectRegistry): +class SyftObject(SyftBaseObject, SyftObjectRegistry, SyftMigrationRegistry): __canonical_name__ = "SyftObject" __version__ = SYFT_OBJECT_VERSION_1 @@ -458,6 +552,12 @@ def _syft_unique_keys_dict(cls) -> Dict[str, type]: def _syft_searchable_keys_dict(cls) -> Dict[str, type]: return cls._syft_keys_types_dict("__attr_searchable__") + def migrate_to(self, version: int, context: Optional[Context] = None) -> Any: + migration_transform = SyftMigrationRegistry.get_migration_for_version( + type_from=type(self), version_to=version + ) + return migration_transform(self, context) + def short_qual_name(name: str) -> str: # If the name is a qualname of formax a.b.c.d we will only get d From 331015cd652dc221aad10e3e0609224d2ebcc49b Mon Sep 17 00:00:00 2001 From: Shubham Gupta Date: Fri, 15 Sep 2023 19:02:07 +0530 Subject: [PATCH 12/67] rename SyftObjectMetadata to SyftObjectMigrationState rename stash and service for SyftObjectMetadata to MigrationStateStash and service --- ..._service.py => migration_state_service.py} | 24 +++++----- ..._metadata.py => object_migration_state.py} | 45 +++++-------------- 2 files changed, 23 insertions(+), 46 deletions(-) rename packages/syft/src/syft/service/object_search/{object_service_service.py => migration_state_service.py} (58%) rename packages/syft/src/syft/service/object_search/{object_metadata.py => object_migration_state.py} (55%) diff --git a/packages/syft/src/syft/service/object_search/object_service_service.py b/packages/syft/src/syft/service/object_search/migration_state_service.py similarity index 58% rename from packages/syft/src/syft/service/object_search/object_service_service.py rename to packages/syft/src/syft/service/object_search/migration_state_service.py index ac7324addc6..cad1338587c 100644 --- a/packages/syft/src/syft/service/object_search/object_service_service.py +++ 
b/packages/syft/src/syft/service/object_search/migration_state_service.py @@ -8,23 +8,23 @@ from ..response import SyftError from ..service import AbstractService from ..service import service_method -from .object_metadata import SyftObjectMetadata -from .object_metadata import SyftObjectMetadataStash +from .object_migration_state import SyftMigrationStateStash +from .object_migration_state import SyftObjectMigrationState @serializable() -class ObjectSearchService(AbstractService): +class MigrateStateService(AbstractService): store: DocumentStore - stash: SyftObjectMetadata + stash: SyftObjectMigrationState def __init__(self, store: DocumentStore) -> None: self.store = store - self.stash: SyftObjectMetadataStash = SyftObjectMetadataStash(store=store) + self.stash: SyftMigrationStateStash = SyftMigrationStateStash(store=store) - @service_method(path="object_metadata", name="search") - def search( + @service_method(path="migration", name="get_version") + def get_version( self, context: AuthedServiceContext, canonical_name: str - ) -> Union[SyftObjectMetadata, SyftError]: + ) -> Union[int, SyftError]: """Search for the metadata for an object.""" result = self.stash.get_by_name( @@ -34,11 +34,11 @@ def search( if result.is_err(): return SyftError(message=f"{result.err()}") - result = result.ok() + migration_state = result.ok() - if result is None: + if migration_state is None: return SyftError( - message=f"No metadata exists for canonical name: {canonical_name}" + message=f"No migration state exists for canonical name: {canonical_name}" ) - return result + return migration_state.current_version diff --git a/packages/syft/src/syft/service/object_search/object_metadata.py b/packages/syft/src/syft/service/object_search/object_migration_state.py similarity index 55% rename from packages/syft/src/syft/service/object_search/object_metadata.py rename to packages/syft/src/syft/service/object_search/object_migration_state.py index f7ab83895a4..cc0759d2728 100644 --- 
a/packages/syft/src/syft/service/object_search/object_metadata.py +++ b/packages/syft/src/syft/service/object_search/object_migration_state.py @@ -7,57 +7,34 @@ # relative from ...node.credentials import SyftVerifyKey -from ...serde import serializable +from ...serde.serializable import serializable from ...store.document_store import BaseStash from ...store.document_store import DocumentStore from ...store.document_store import PartitionKey from ...store.document_store import PartitionSettings -from ...types.syft_object import PartialSyftObject from ...types.syft_object import SYFT_OBJECT_VERSION_1 -from ...types.syft_object import SyftBaseObject +from ...types.syft_object import SyftObject from ..action.action_permissions import ActionObjectPermission from ..user.user import User @serializable() -class SyftObjectMetadata(PartialSyftObject): - __canonical_name__ = "SyftObjectMetadata" +class SyftObjectMigrationState(SyftObject): + __canonical_name__ = "SyftObjectMigrationState" __version__ = SYFT_OBJECT_VERSION_1 canonical_name: str - klass_version: int - object_hash: str - - @classmethod - def from_klass(cls, klass: SyftBaseObject): - object_hash = cls.__generate_hash_(klass=klass) - return SyftObjectMetadata( - canonical_name=klass.__canonical_name__, - klass_version=klass.__version__, - object_hash=object_hash, - ) - - @staticmethod - def __generate_hash_(klass: SyftBaseObject) -> str: - unique_attrs = getattr(klass, "__attr_unique__", ()) - searchable_attrs = getattr(klass, "__attr_searchable__", ()) - return hash( - tuple( - klass.__fields__.values(), - tuple(unique_attrs), - tuple(searchable_attrs), - ) - ) + current_version: int KlassNamePartitionKey = PartitionKey(key="canonical_name", type_=str) -class SyftObjectMetadataStash(BaseStash): - object_type = SyftObjectMetadata +class SyftMigrationStateStash(BaseStash): + object_type = SyftObjectMigrationState settings: PartitionSettings = PartitionSettings( name=User.__canonical_name__, - 
object_type=SyftObjectMetadata, + object_type=SyftObjectMigrationState, ) def __init__(self, store: DocumentStore) -> None: @@ -66,9 +43,9 @@ def __init__(self, store: DocumentStore) -> None: def set( self, credentials: SyftVerifyKey, - syft_object_metadata: SyftObjectMetadata, + syft_object_metadata: SyftObjectMigrationState, add_permissions: Optional[List[ActionObjectPermission]] = None, - ) -> Result[SyftObjectMetadata, str]: + ) -> Result[SyftObjectMigrationState, str]: res = self.check_type(syft_object_metadata, self.object_type) # we dont use and_then logic here as it is hard because of the order of the arguments if res.is_err(): @@ -79,6 +56,6 @@ def set( def get_by_name( self, canonical_name: str, credentials: SyftVerifyKey - ) -> Result[SyftObjectMetadata, str]: + ) -> Result[SyftObjectMigrationState, str]: qks = KlassNamePartitionKey.with_obj(canonical_name) return self.query_one(credentials=credentials, qks=qks) From 1259a93c9f77d7ad76024adc90d7c2b483d0ed62 Mon Sep 17 00:00:00 2001 From: Shubham Gupta Date: Fri, 15 Sep 2023 19:21:24 +0530 Subject: [PATCH 13/67] - add property to derive latest and supported versions for SyftObjectMigrationState --- .../object_search/object_migration_state.py | 15 +++++++++++++++ packages/syft/src/syft/types/syft_object.py | 8 ++++++++ 2 files changed, 23 insertions(+) diff --git a/packages/syft/src/syft/service/object_search/object_migration_state.py b/packages/syft/src/syft/service/object_search/object_migration_state.py index cc0759d2728..17bc2b35232 100644 --- a/packages/syft/src/syft/service/object_search/object_migration_state.py +++ b/packages/syft/src/syft/service/object_search/object_migration_state.py @@ -13,6 +13,7 @@ from ...store.document_store import PartitionKey from ...store.document_store import PartitionSettings from ...types.syft_object import SYFT_OBJECT_VERSION_1 +from ...types.syft_object import SyftMigrationRegistry from ...types.syft_object import SyftObject from ..action.action_permissions import 
ActionObjectPermission from ..user.user import User @@ -26,6 +27,20 @@ class SyftObjectMigrationState(SyftObject): canonical_name: str current_version: int + @property + def latest_version(self) -> Optional[int]: + available_versions = SyftMigrationRegistry.get_versions( + canonical_name=self.canonical_name, + ) + if available_versions is None: + return None + + return sorted(available_versions, reverse=True)[0] + + @property + def supported_versions(self) -> List: + return SyftMigrationRegistry.get_versions(self.canonical_name) + KlassNamePartitionKey = PartitionKey(key="canonical_name", type_=str) diff --git a/packages/syft/src/syft/types/syft_object.py b/packages/syft/src/syft/types/syft_object.py index 79d584e6c27..ca817979be2 100644 --- a/packages/syft/src/syft/types/syft_object.py +++ b/packages/syft/src/syft/types/syft_object.py @@ -203,6 +203,14 @@ def __init_subclass__(cls, **kwargs: Any) -> None: klass_version: fqn } + @classmethod + def get_versions(cls, canonical_name: str) -> List: + available_versions = cls.__migration_version_registry__.get( + canonical_name, + {}, + ) + return list(available_versions.keys()) + @classmethod def register_transform( cls, klass_type_str: str, version_from: int, version_to: int, method: Callable From 46bf604df40c17c0d782e39f3571879523bb6a9c Mon Sep 17 00:00:00 2001 From: Shubham Gupta Date: Mon, 18 Sep 2023 14:36:29 +0530 Subject: [PATCH 14/67] add a notebook for Data Migration --- notebooks/Experimental/Data Migration.ipynb | 551 ++++++++++++++++++++ 1 file changed, 551 insertions(+) create mode 100644 notebooks/Experimental/Data Migration.ipynb diff --git a/notebooks/Experimental/Data Migration.ipynb b/notebooks/Experimental/Data Migration.ipynb new file mode 100644 index 00000000000..6397ff5a1de --- /dev/null +++ b/notebooks/Experimental/Data Migration.ipynb @@ -0,0 +1,551 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 1, + "id": "d45640dc", + "metadata": {}, + "outputs": [ + { + "name": "stderr", 
+ "output_type": "stream", + "text": [ + "kj/filesystem-disk-unix.c++:1703: warning: PWD environment variable doesn't match current directory; pwd = /home/shubham/PySyft\n" + ] + } + ], + "source": [ + "import syft as sy\n", + "from syft.types.datetime import DateTime\n", + "from syft.service.metadata.node_metadata import NodeMetadata, NodeMetadataV2" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "id": "9786ef80", + "metadata": {}, + "outputs": [], + "source": [ + "from syft.types.syft_object import SyftMigrationRegistry\n", + "from syft.service.metadata.migrations import *" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "id": "8a50ac8d", + "metadata": { + "scrolled": true + }, + "outputs": [ + { + "data": { + "text/html": [ + "{'SyftObject': {1: 'syft.types.syft_object.SyftObject'}, 'PartialSyftObject': {1: 'syft.types.syft_object.PartialSyftObject'}, 'NodeServiceContext': {1: 'syft.service.context.NodeServiceContext'}, 'AuthedServiceContext': {1: 'syft.service.context.AuthedServiceContext'}, 'UnauthedServiceContext': {1: 'syft.service.context.UnauthedServiceContext'}, 'NodeMetadataUpdate': {1: 'syft.service.metadata.node_metadata.NodeMetadataUpdate'}, 'NodeMetadata': {1: 'syft.service.metadata.node_metadata.NodeMetadata', 2: 'syft.service.metadata.node_metadata.NodeMetadataV2'}, 'LinkedObject': {1: 'syft.store.linked_obj.LinkedObject'}, 'NodeConnection': {1: 'syft.client.connection.NodeConnection'}, 'APIEndpoint': {1: 'syft.client.api.APIEndpoint'}, 'SignedSyftAPICall': {1: 'syft.client.api.SignedSyftAPICall'}, 'SyftAPICall': {1: 'syft.client.api.SyftAPICall'}, 'SyftAPI': {1: 'syft.client.api.SyftAPI'}, 'User': {1: 'syft.service.user.user.User'}, 'UserUpdate': {1: 'syft.service.user.user.UserUpdate'}, 'UserCreate': {1: 'syft.service.user.user.UserCreate'}, 'UserSearch': {1: 'syft.service.user.user.UserSearch'}, 'UserView': {1: 'syft.service.user.user.UserView'}, 'UserViewPage': {1: 'syft.service.user.user.UserViewPage'}, 
'UserPrivateKey': {1: 'syft.service.user.user.UserPrivateKey'}, 'NodeSettingsUpdate': {1: 'syft.service.settings.settings.NodeSettingsUpdate'}, 'NodeSettings': {1: 'syft.service.settings.settings.NodeSettings'}, 'HTTPConnection': {1: 'syft.client.client.HTTPConnection'}, 'PythonConnection': {1: 'syft.client.client.PythonConnection'}, 'DateTime': {1: 'syft.types.datetime.DateTime'}, 'BlobFile': {1: 'syft.types.blob_storage.BlobFile'}, 'SecureFilePathLocation': {1: 'syft.types.blob_storage.SecureFilePathLocation'}, 'SeaweedSecureFilePathLocation': {1: 'syft.types.blob_storage.SeaweedSecureFilePathLocation'}, 'BlobStorageEntry': {1: 'syft.types.blob_storage.BlobStorageEntry'}, 'BlobStorageMetadata': {1: 'syft.types.blob_storage.BlobStorageMetadata'}, 'CreateBlobStorageEntry': {1: 'syft.types.blob_storage.CreateBlobStorageEntry'}, 'BlobRetrieval': {1: 'syft.store.blob_storage.BlobRetrieval'}, 'SyftObjectRetrieval': {1: 'syft.store.blob_storage.SyftObjectRetrieval'}, 'BlobRetrievalByURL': {1: 'syft.store.blob_storage.BlobRetrievalByURL'}, 'BlobDeposit': {1: 'syft.store.blob_storage.BlobDeposit'}, 'WorkerSettings': {1: 'syft.node.worker_settings.WorkerSettings'}, 'HTTPNodeRoute': {1: 'syft.service.network.routes.HTTPNodeRoute'}, 'PythonNodeRoute': {1: 'syft.service.network.routes.PythonNodeRoute'}, 'EnclaveMetadata': {1: 'syft.client.enclave_client.EnclaveMetadata'}, 'DataSubject': {1: 'syft.service.data_subject.data_subject.DataSubject'}, 'DataSubjectCreate': {1: 'syft.service.data_subject.data_subject.DataSubjectCreate'}, 'DataSubjectMemberRelationship': {1: 'syft.service.data_subject.data_subject_member.DataSubjectMemberRelationship'}, 'Contributor': {1: 'syft.service.dataset.dataset.Contributor'}, 'MarkdownDescription': {1: 'syft.service.dataset.dataset.MarkdownDescription'}, 'Asset': {1: 'syft.service.dataset.dataset.Asset'}, 'CreateAsset': {1: 'syft.service.dataset.dataset.CreateAsset'}, 'Dataset': {1: 'syft.service.dataset.dataset.Dataset'}, 'DatasetPageView': {1: 
'syft.service.dataset.dataset.DatasetPageView'}, 'CreateDataset': {1: 'syft.service.dataset.dataset.CreateDataset'}, 'ActionDataEmpty': {1: 'syft.service.action.action_data_empty.ActionDataEmpty'}, 'ActionFileData': {1: 'syft.service.action.action_data_empty.ActionFileData'}, 'Action': {1: 'syft.service.action.action_object.Action'}, 'ActionObject': {1: 'syft.service.action.action_object.ActionObject'}, 'AnyActionObject': {1: 'syft.service.action.action_object.AnyActionObject'}, 'TwinObject': {1: 'syft.types.twin_object.TwinObject'}, 'Policy': {1: 'syft.service.policy.policy.Policy'}, 'InputPolicy': {1: 'syft.service.policy.policy.InputPolicy'}, 'ExactMatch': {1: 'syft.service.policy.policy.ExactMatch'}, 'OutputHistory': {1: 'syft.service.policy.policy.OutputHistory'}, 'OutputPolicy': {1: 'syft.service.policy.policy.OutputPolicy'}, 'OutputPolicyExecuteCount': {1: 'syft.service.policy.policy.OutputPolicyExecuteCount'}, 'OutputPolicyExecuteOnce': {1: 'syft.service.policy.policy.OutputPolicyExecuteOnce'}, 'UserOutputPolicy': {1: 'syft.service.policy.policy.UserOutputPolicy'}, 'UserInputPolicy': {1: 'syft.service.policy.policy.UserInputPolicy'}, 'UserPolicy': {1: 'syft.service.policy.policy.UserPolicy'}, 'SubmitUserPolicy': {1: 'syft.service.policy.policy.SubmitUserPolicy'}, 'UserCode': {1: 'syft.service.code.user_code.UserCode'}, 'SubmitUserCode': {1: 'syft.service.code.user_code.SubmitUserCode'}, 'UserCodeExecutionResult': {1: 'syft.service.code.user_code.UserCodeExecutionResult'}, 'CodeHistory': {1: 'syft.service.code_history.code_history.CodeHistory'}, 'CodeHistoryView': {1: 'syft.service.code_history.code_history.CodeHistoryView'}, 'CodeHistoriesDict': {1: 'syft.service.code_history.code_history.CodeHistoriesDict'}, 'UsersCodeHistoriesDict': {1: 'syft.service.code_history.code_history.UsersCodeHistoriesDict'}, 'NodePeer': {1: 'syft.service.network.node_peer.NodePeer'}, 'ProxyClient': {1: 'syft.client.gateway_client.ProxyClient'}, 'CommandReport': {1: 
'syft.service.vpn.vpn.CommandReport'}, 'CommandResult': {1: 'syft.service.vpn.vpn.CommandResult'}, 'VPNClientConnection': {1: 'syft.service.vpn.vpn.VPNClientConnection'}, 'HeadscaleAuthToken': {1: 'syft.service.vpn.headscale_client.HeadscaleAuthToken'}, 'TailscalePeer': {1: 'syft.service.vpn.tailscale_client.TailscalePeer'}, 'TailscaleStatus': {1: 'syft.service.vpn.tailscale_client.TailscaleStatus'}, 'OnDiskBlobDeposit': {1: 'syft.store.blob_storage.on_disk.OnDiskBlobDeposit'}, 'SeaweedFSBlobDeposit': {1: 'syft.store.blob_storage.seaweedfs.SeaweedFSBlobDeposit'}, 'NumpyArrayObject': {1: 'syft.service.action.numpy.NumpyArrayObject'}, 'NumpyScalarObject': {1: 'syft.service.action.numpy.NumpyScalarObject'}, 'NumpyBoolObject': {1: 'syft.service.action.numpy.NumpyBoolObject'}, 'PandasDataframeObject': {1: 'syft.service.action.pandas.PandasDataFrameObject'}, 'PandasSeriesObject': {1: 'syft.service.action.pandas.PandasSeriesObject'}, 'ReplyNotification': {1: 'syft.service.notification.notifications.ReplyNotification'}, 'Notification': {1: 'syft.service.notification.notifications.Notification'}, 'CreateNotification': {1: 'syft.service.notification.notifications.CreateNotification'}, 'Change': {1: 'syft.service.request.request.Change'}, 'ChangeStatus': {1: 'syft.service.request.request.ChangeStatus'}, 'ActionStoreChange': {1: 'syft.service.request.request.ActionStoreChange'}, 'Request': {1: 'syft.service.request.request.Request'}, 'RequestInfo': {1: 'syft.service.request.request.RequestInfo'}, 'RequestInfoFilter': {1: 'syft.service.request.request.RequestInfoFilter'}, 'SubmitRequest': {1: 'syft.service.request.request.SubmitRequest'}, 'ObjectMutation': {1: 'syft.service.request.request.ObjectMutation'}, 'EnumMutation': {1: 'syft.service.request.request.EnumMutation'}, 'UserCodeStatusChange': {1: 'syft.service.request.request.UserCodeStatusChange'}, 'ProjectEvent': {1: 'syft.service.project.project.ProjectEvent'}, 'ProjectEventAddObject': {1: 
'syft.service.project.project.ProjectEventAddObject'}, 'ProjectEventAddLink': {1: 'syft.service.project.project.ProjectEventAddLink'}, 'ProjectSubEvent': {1: 'syft.service.project.project.ProjectSubEvent'}, 'ProjectThreadMessage': {1: 'syft.service.project.project.ProjectThreadMessage'}, 'ProjectMessage': {1: 'syft.service.project.project.ProjectMessage'}, 'ProjectRequestResponse': {1: 'syft.service.project.project.ProjectRequestResponse'}, 'ProjectRequest': {1: 'syft.service.project.project.ProjectRequest'}, 'AnswerProjectPoll': {1: 'syft.service.project.project.AnswerProjectPoll'}, 'ProjectPoll': {1: 'syft.service.project.project.ProjectMultipleChoicePoll'}, 'Project': {1: 'syft.service.project.project.Project'}, 'ProjectSubmit': {1: 'syft.service.project.project.ProjectSubmit'}, 'QueueItem': {1: 'syft.service.queue.queue_stash.QueueItem'}, 'ZMQClientConfig': {1: 'syft.service.queue.zmq_queue.ZMQClientConfig'}, 'Plan': {1: 'syft.service.action.plan.Plan'}}" + ], + "text/plain": [ + "{'SyftObject': {1: 'syft.types.syft_object.SyftObject'},\n", + " 'PartialSyftObject': {1: 'syft.types.syft_object.PartialSyftObject'},\n", + " 'NodeServiceContext': {1: 'syft.service.context.NodeServiceContext'},\n", + " 'AuthedServiceContext': {1: 'syft.service.context.AuthedServiceContext'},\n", + " 'UnauthedServiceContext': {1: 'syft.service.context.UnauthedServiceContext'},\n", + " 'NodeMetadataUpdate': {1: 'syft.service.metadata.node_metadata.NodeMetadataUpdate'},\n", + " 'NodeMetadata': {1: 'syft.service.metadata.node_metadata.NodeMetadata',\n", + " 2: 'syft.service.metadata.node_metadata.NodeMetadataV2'},\n", + " 'LinkedObject': {1: 'syft.store.linked_obj.LinkedObject'},\n", + " 'NodeConnection': {1: 'syft.client.connection.NodeConnection'},\n", + " 'APIEndpoint': {1: 'syft.client.api.APIEndpoint'},\n", + " 'SignedSyftAPICall': {1: 'syft.client.api.SignedSyftAPICall'},\n", + " 'SyftAPICall': {1: 'syft.client.api.SyftAPICall'},\n", + " 'SyftAPI': {1: 
'syft.client.api.SyftAPI'},\n", + " 'User': {1: 'syft.service.user.user.User'},\n", + " 'UserUpdate': {1: 'syft.service.user.user.UserUpdate'},\n", + " 'UserCreate': {1: 'syft.service.user.user.UserCreate'},\n", + " 'UserSearch': {1: 'syft.service.user.user.UserSearch'},\n", + " 'UserView': {1: 'syft.service.user.user.UserView'},\n", + " 'UserViewPage': {1: 'syft.service.user.user.UserViewPage'},\n", + " 'UserPrivateKey': {1: 'syft.service.user.user.UserPrivateKey'},\n", + " 'NodeSettingsUpdate': {1: 'syft.service.settings.settings.NodeSettingsUpdate'},\n", + " 'NodeSettings': {1: 'syft.service.settings.settings.NodeSettings'},\n", + " 'HTTPConnection': {1: 'syft.client.client.HTTPConnection'},\n", + " 'PythonConnection': {1: 'syft.client.client.PythonConnection'},\n", + " 'DateTime': {1: 'syft.types.datetime.DateTime'},\n", + " 'BlobFile': {1: 'syft.types.blob_storage.BlobFile'},\n", + " 'SecureFilePathLocation': {1: 'syft.types.blob_storage.SecureFilePathLocation'},\n", + " 'SeaweedSecureFilePathLocation': {1: 'syft.types.blob_storage.SeaweedSecureFilePathLocation'},\n", + " 'BlobStorageEntry': {1: 'syft.types.blob_storage.BlobStorageEntry'},\n", + " 'BlobStorageMetadata': {1: 'syft.types.blob_storage.BlobStorageMetadata'},\n", + " 'CreateBlobStorageEntry': {1: 'syft.types.blob_storage.CreateBlobStorageEntry'},\n", + " 'BlobRetrieval': {1: 'syft.store.blob_storage.BlobRetrieval'},\n", + " 'SyftObjectRetrieval': {1: 'syft.store.blob_storage.SyftObjectRetrieval'},\n", + " 'BlobRetrievalByURL': {1: 'syft.store.blob_storage.BlobRetrievalByURL'},\n", + " 'BlobDeposit': {1: 'syft.store.blob_storage.BlobDeposit'},\n", + " 'WorkerSettings': {1: 'syft.node.worker_settings.WorkerSettings'},\n", + " 'HTTPNodeRoute': {1: 'syft.service.network.routes.HTTPNodeRoute'},\n", + " 'PythonNodeRoute': {1: 'syft.service.network.routes.PythonNodeRoute'},\n", + " 'EnclaveMetadata': {1: 'syft.client.enclave_client.EnclaveMetadata'},\n", + " 'DataSubject': {1: 
'syft.service.data_subject.data_subject.DataSubject'},\n", + " 'DataSubjectCreate': {1: 'syft.service.data_subject.data_subject.DataSubjectCreate'},\n", + " 'DataSubjectMemberRelationship': {1: 'syft.service.data_subject.data_subject_member.DataSubjectMemberRelationship'},\n", + " 'Contributor': {1: 'syft.service.dataset.dataset.Contributor'},\n", + " 'MarkdownDescription': {1: 'syft.service.dataset.dataset.MarkdownDescription'},\n", + " 'Asset': {1: 'syft.service.dataset.dataset.Asset'},\n", + " 'CreateAsset': {1: 'syft.service.dataset.dataset.CreateAsset'},\n", + " 'Dataset': {1: 'syft.service.dataset.dataset.Dataset'},\n", + " 'DatasetPageView': {1: 'syft.service.dataset.dataset.DatasetPageView'},\n", + " 'CreateDataset': {1: 'syft.service.dataset.dataset.CreateDataset'},\n", + " 'ActionDataEmpty': {1: 'syft.service.action.action_data_empty.ActionDataEmpty'},\n", + " 'ActionFileData': {1: 'syft.service.action.action_data_empty.ActionFileData'},\n", + " 'Action': {1: 'syft.service.action.action_object.Action'},\n", + " 'ActionObject': {1: 'syft.service.action.action_object.ActionObject'},\n", + " 'AnyActionObject': {1: 'syft.service.action.action_object.AnyActionObject'},\n", + " 'TwinObject': {1: 'syft.types.twin_object.TwinObject'},\n", + " 'Policy': {1: 'syft.service.policy.policy.Policy'},\n", + " 'InputPolicy': {1: 'syft.service.policy.policy.InputPolicy'},\n", + " 'ExactMatch': {1: 'syft.service.policy.policy.ExactMatch'},\n", + " 'OutputHistory': {1: 'syft.service.policy.policy.OutputHistory'},\n", + " 'OutputPolicy': {1: 'syft.service.policy.policy.OutputPolicy'},\n", + " 'OutputPolicyExecuteCount': {1: 'syft.service.policy.policy.OutputPolicyExecuteCount'},\n", + " 'OutputPolicyExecuteOnce': {1: 'syft.service.policy.policy.OutputPolicyExecuteOnce'},\n", + " 'UserOutputPolicy': {1: 'syft.service.policy.policy.UserOutputPolicy'},\n", + " 'UserInputPolicy': {1: 'syft.service.policy.policy.UserInputPolicy'},\n", + " 'UserPolicy': {1: 
'syft.service.policy.policy.UserPolicy'},\n", + " 'SubmitUserPolicy': {1: 'syft.service.policy.policy.SubmitUserPolicy'},\n", + " 'UserCode': {1: 'syft.service.code.user_code.UserCode'},\n", + " 'SubmitUserCode': {1: 'syft.service.code.user_code.SubmitUserCode'},\n", + " 'UserCodeExecutionResult': {1: 'syft.service.code.user_code.UserCodeExecutionResult'},\n", + " 'CodeHistory': {1: 'syft.service.code_history.code_history.CodeHistory'},\n", + " 'CodeHistoryView': {1: 'syft.service.code_history.code_history.CodeHistoryView'},\n", + " 'CodeHistoriesDict': {1: 'syft.service.code_history.code_history.CodeHistoriesDict'},\n", + " 'UsersCodeHistoriesDict': {1: 'syft.service.code_history.code_history.UsersCodeHistoriesDict'},\n", + " 'NodePeer': {1: 'syft.service.network.node_peer.NodePeer'},\n", + " 'ProxyClient': {1: 'syft.client.gateway_client.ProxyClient'},\n", + " 'CommandReport': {1: 'syft.service.vpn.vpn.CommandReport'},\n", + " 'CommandResult': {1: 'syft.service.vpn.vpn.CommandResult'},\n", + " 'VPNClientConnection': {1: 'syft.service.vpn.vpn.VPNClientConnection'},\n", + " 'HeadscaleAuthToken': {1: 'syft.service.vpn.headscale_client.HeadscaleAuthToken'},\n", + " 'TailscalePeer': {1: 'syft.service.vpn.tailscale_client.TailscalePeer'},\n", + " 'TailscaleStatus': {1: 'syft.service.vpn.tailscale_client.TailscaleStatus'},\n", + " 'OnDiskBlobDeposit': {1: 'syft.store.blob_storage.on_disk.OnDiskBlobDeposit'},\n", + " 'SeaweedFSBlobDeposit': {1: 'syft.store.blob_storage.seaweedfs.SeaweedFSBlobDeposit'},\n", + " 'NumpyArrayObject': {1: 'syft.service.action.numpy.NumpyArrayObject'},\n", + " 'NumpyScalarObject': {1: 'syft.service.action.numpy.NumpyScalarObject'},\n", + " 'NumpyBoolObject': {1: 'syft.service.action.numpy.NumpyBoolObject'},\n", + " 'PandasDataframeObject': {1: 'syft.service.action.pandas.PandasDataFrameObject'},\n", + " 'PandasSeriesObject': {1: 'syft.service.action.pandas.PandasSeriesObject'},\n", + " 'ReplyNotification': {1: 
'syft.service.notification.notifications.ReplyNotification'},\n", + " 'Notification': {1: 'syft.service.notification.notifications.Notification'},\n", + " 'CreateNotification': {1: 'syft.service.notification.notifications.CreateNotification'},\n", + " 'Change': {1: 'syft.service.request.request.Change'},\n", + " 'ChangeStatus': {1: 'syft.service.request.request.ChangeStatus'},\n", + " 'ActionStoreChange': {1: 'syft.service.request.request.ActionStoreChange'},\n", + " 'Request': {1: 'syft.service.request.request.Request'},\n", + " 'RequestInfo': {1: 'syft.service.request.request.RequestInfo'},\n", + " 'RequestInfoFilter': {1: 'syft.service.request.request.RequestInfoFilter'},\n", + " 'SubmitRequest': {1: 'syft.service.request.request.SubmitRequest'},\n", + " 'ObjectMutation': {1: 'syft.service.request.request.ObjectMutation'},\n", + " 'EnumMutation': {1: 'syft.service.request.request.EnumMutation'},\n", + " 'UserCodeStatusChange': {1: 'syft.service.request.request.UserCodeStatusChange'},\n", + " 'ProjectEvent': {1: 'syft.service.project.project.ProjectEvent'},\n", + " 'ProjectEventAddObject': {1: 'syft.service.project.project.ProjectEventAddObject'},\n", + " 'ProjectEventAddLink': {1: 'syft.service.project.project.ProjectEventAddLink'},\n", + " 'ProjectSubEvent': {1: 'syft.service.project.project.ProjectSubEvent'},\n", + " 'ProjectThreadMessage': {1: 'syft.service.project.project.ProjectThreadMessage'},\n", + " 'ProjectMessage': {1: 'syft.service.project.project.ProjectMessage'},\n", + " 'ProjectRequestResponse': {1: 'syft.service.project.project.ProjectRequestResponse'},\n", + " 'ProjectRequest': {1: 'syft.service.project.project.ProjectRequest'},\n", + " 'AnswerProjectPoll': {1: 'syft.service.project.project.AnswerProjectPoll'},\n", + " 'ProjectPoll': {1: 'syft.service.project.project.ProjectMultipleChoicePoll'},\n", + " 'Project': {1: 'syft.service.project.project.Project'},\n", + " 'ProjectSubmit': {1: 'syft.service.project.project.ProjectSubmit'},\n", + " 
'QueueItem': {1: 'syft.service.queue.queue_stash.QueueItem'},\n", + " 'ZMQClientConfig': {1: 'syft.service.queue.zmq_queue.ZMQClientConfig'},\n", + " 'Plan': {1: 'syft.service.action.plan.Plan'}}" + ] + }, + "execution_count": 8, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "SyftMigrationRegistry.__migration_version_registry__" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "id": "0a2c5b71", + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "{'NodeMetadata': {'1x2': <function generate_transform_wrapper.<locals>.wrapper at 0x7f458137a670>, '2x1': <function generate_transform_wrapper.<locals>.wrapper at 0x7f45815fa160>}}" + ], + "text/plain": [ + "{'NodeMetadata': {'1x2': .wrapper(self: syft.service.metadata.node_metadata.NodeMetadata, context: Union[syft.types.transforms.TransformContext, syft.service.context.NodeServiceContext, NoneType] = None) -> syft.service.metadata.node_metadata.NodeMetadataV2>,\n", + " '2x1': .wrapper(self: syft.service.metadata.node_metadata.NodeMetadataV2, context: Union[syft.types.transforms.TransformContext, syft.service.context.NodeServiceContext, NoneType] = None) -> syft.service.metadata.node_metadata.NodeMetadata>}}" + ] + }, + "execution_count": 6, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "SyftMigrationRegistry.__migration_transform_registry__" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "id": "a8631e38", + "metadata": {}, + "outputs": [], + "source": [ + "node_metadata_v1 = NodeMetadata(\n", + " name=\"OM\",\n", + " highest_object_version=2,\n", + " lowest_object_version=1,\n", + " id=sy.UID(),\n", + " verify_key=sy.SyftSigningKey.generate().verify_key,\n", + " syft_version=\"0.8.2\",\n", + " node_type=\"domain\",\n", + " signup_enabled=True,\n", + " admin_email=\"info@openmined.org\",\n", + " node_side_type=\"low_side\",\n", + " show_warnings=False,\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": 
null, + "id": "c08ff2f1", + "metadata": {}, + "outputs": [], + "source": [ + "sy.deserialize(sy.serialize(node_metadata_v1, to_bytes=True), from_bytes=True)" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "id": "88dd16e7", + "metadata": {}, + "outputs": [], + "source": [ + "node_metadata_v2 = NodeMetadataV2(\n", + " name=\"OM\",\n", + " highest_version=2,\n", + " lowest_version=1,\n", + " id=sy.UID(),\n", + " verify_key=sy.SyftSigningKey.generate().verify_key,\n", + " syft_version=\"0.8.2\",\n", + " node_type=\"domain\",\n", + " signup_enabled=True,\n", + " admin_email=\"info@openmined.org\",\n", + " node_side_type=\"low_side\",\n", + " show_warnings=False,\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": 19, + "id": "616829fb", + "metadata": {}, + "outputs": [ + { + "data": { + "text/markdown": [ + "```python\n", + "class NodeMetadataV2:\n", + " id: str = fc1e8d7ba15a4cc4a1bfe18f0bc890ec\n", + "\n", + "```" + ], + "text/plain": [ + "syft.service.metadata.node_metadata.NodeMetadataV2" + ] + }, + "execution_count": 19, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "sy.deserialize(sy.serialize(node_metadata_v2, to_bytes=True), from_bytes=True)" + ] + }, + { + "cell_type": "markdown", + "id": "4a17c9c5", + "metadata": {}, + "source": [ + "### Migrating to different versions" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "f5f20a44", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 15, + "id": "00b818fe", + "metadata": {}, + "outputs": [ + { + "data": { + "text/markdown": [ + "```python\n", + "class NodeMetadataV2:\n", + " id: str = 5fc7c3b2d32546d49fa702f1d3c0a53b\n", + "\n", + "```" + ], + "text/plain": [ + "syft.service.metadata.node_metadata.NodeMetadataV2" + ] + }, + "execution_count": 15, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "node_metadata_v1.migrate_to(version=2)" + ] + }, + { + 
"cell_type": "code", + "execution_count": 16, + "id": "5d38e1fb", + "metadata": {}, + "outputs": [ + { + "data": { + "text/markdown": [ + "```python\n", + "class NodeMetadata:\n", + " id: str = fc1e8d7ba15a4cc4a1bfe18f0bc890ec\n", + "\n", + "```" + ], + "text/plain": [ + "syft.service.metadata.node_metadata.NodeMetadata" + ] + }, + "execution_count": 16, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "node_metadata_v2.migrate_to(version=1)" + ] + }, + { + "cell_type": "code", + "execution_count": 22, + "id": "7b704dc7", + "metadata": {}, + "outputs": [], + "source": [ + "node = sy.Orchestra.launch(\"test-domain\")" + ] + }, + { + "cell_type": "code", + "execution_count": 33, + "id": "790e6fd0", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "dict_keys(['QueueItem', 'User', 'NodeSettings', 'Dataset', 'UserCode', 'Request', 'DataSubject', 'NodePeer', 'UserPolicy', 'Notification', 'DataSubjectMemberRelationship', 'Project', 'CodeHistory', 'BlobStorageEntry'])" + ] + }, + "execution_count": 33, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "node.python_node.document_store.partitions.keys()" + ] + }, + { + "cell_type": "markdown", + "id": "6eefb927", + "metadata": {}, + "source": [ + "### Migration Detection\n", + "\n", + "SyftObjectMigrationState\n", + "- canonical_name:\n", + "- current_version:\n", + "\n", + "|Name | Version\n", + "----------|--------\n", + "|User |1\n", + "|Dataset |1\n", + "|Project |2\n", + "\n", + "\n", + "- UserV2\n", + "\n", + "\n", + "- Migration to latest version:\n", + " SyftObjectMigrationState -> current version\n", + " 1 -> 2\n", + " All data is migrated to version 2\n", + " update the state to SyftObjectMigrationState current version 2\n", + " \n", + "|Name |Version\n", + "----------|--------\n", + "|User |2\n", + "|Dataset |1\n", + "|Project |2\n", + "\n", + "\n", + "Automatic Migration vs Manual Migration:\n", + "\n", + "For now we can move with 
Automatic Migrations where\n", + "- Setting new fields to default values\n", + "- Having transforms for handling unique fields" + ] + }, + { + "cell_type": "markdown", + "id": "dfb092f3", + "metadata": {}, + "source": [ + "### Server is running a different version from the client\n", + "\n", + "- Client -> 0.8.3 -> UserV2\n", + "- Server -> 0.8.2 -> UserV1" + ] + }, + { + "cell_type": "markdown", + "id": "5bdf5c20", + "metadata": {}, + "source": [ + "```python\n", + "@service_method():\n", + "def create(\n", + " self, \n", + " context: AuthContext, \n", + " create_user: Union[CreateUserV1, CreateUserV2]\n", + ") -> Union[CreateUserV1, CreateUserV2]:\n", + " \n", + " # we just need to take of care of \n", + " # data migrations of the table only\n", + " client_version = context.client_version\n", + " server_version = context.server_version\n", + " \n", + " create_user: UserV1 = user_create.migrate_to(server_version)\n", + "\n", + " user: UserV1 = self.stash.set(create_user, context.credentials)\n", + " \n", + " # no need for any migrations from \n", + " # Viewable/Intermediate Objects like UserViewV1 to UserViewV2\n", + " return user.migrate_to(client_version)\n", + "```" + ] + }, + { + "cell_type": "markdown", + "id": "5bcf138c", + "metadata": {}, + "source": [ + "api.services.user.create(UserV2)\n", + "\n", + "\n", + "```markdown\n", + "client -> {“User”: [1], …..{“NodeMetadata”: [1,2]..}},\n", + "\n", + "client -> \n", + " args and kwargs:\n", + " - calculate the versions of the args and kwargs\n", + " - store that in a dict\n", + " - e.g. \n", + " kwargs = {\"user_create\": {\"UserCreate\": 2},}\n", + " return_annotation: {\"UserView\": [1, 2]}\n", + "```\n", + "\n", + "1. Client version is behind the server version\n", + "\n", + "\n", + "2. 
Server version is behind the client version\n", + " - Serialization\n", + " - Migration\n", + " \n", + "Possible Solution: \n", + " - We perform the migration transform on the client itself and serialize/deserde becomes easier.\n", + " - Server sends the ServeObjectVersionMap: {“User”: [1], …..{“NodeMetadata”: [1,2]..}}\n", + " - Client identifies if server is running a lower version:\n", + " - migrate to the supported version of object on client side\n", + " - otherwise server takes care of the migration\n", + " \n", + "`\n", + "{“User”: [1], …..{“NodeMetadata”: [1,2]..}}\n", + "\n", + "`\n", + "\n", + "this information comes along with Metadata: {“User”: [1], …..{“NodeMetadata”: [1,2]..}} and cache it a client level." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "d5fb3651", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.9.7" + }, + "toc": { + "base_numbering": 1, + "nav_menu": {}, + "number_sections": true, + "sideBar": true, + "skip_h1_title": false, + "title_cell": "Table of Contents", + "title_sidebar": "Contents", + "toc_cell": false, + "toc_position": {}, + "toc_section_display": true, + "toc_window_display": false + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} From 05d64b24170bdc893e936cedd2210415a0e8120f Mon Sep 17 00:00:00 2001 From: Shubham Gupta Date: Mon, 18 Sep 2023 16:31:35 +0530 Subject: [PATCH 15/67] update notebook --- notebooks/Experimental/Data Migration.ipynb | 131 ++++++++++++-------- 1 file changed, 77 insertions(+), 54 deletions(-) diff --git a/notebooks/Experimental/Data Migration.ipynb b/notebooks/Experimental/Data Migration.ipynb 
index 6397ff5a1de..a46bf840400 100644 --- a/notebooks/Experimental/Data Migration.ipynb +++ b/notebooks/Experimental/Data Migration.ipynb @@ -2,8 +2,8 @@ "cells": [ { "cell_type": "code", - "execution_count": 1, - "id": "d45640dc", + "execution_count": 2, + "id": "a679e5fb", "metadata": {}, "outputs": [ { @@ -22,8 +22,8 @@ }, { "cell_type": "code", - "execution_count": 2, - "id": "9786ef80", + "execution_count": 3, + "id": "27cd4d0f", "metadata": {}, "outputs": [], "source": [ @@ -33,8 +33,20 @@ }, { "cell_type": "code", - "execution_count": 8, - "id": "8a50ac8d", + "execution_count": null, + "id": "0f062500", + "metadata": {}, + "outputs": [], + "source": [ + "__migration_version_registry__ = {\n", + " \"canonical_name\": {version_number: \"klass_name\"}\n", + "}" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "id": "7ac6a650", "metadata": { "scrolled": true }, @@ -164,7 +176,7 @@ " 'Plan': {1: 'syft.service.action.plan.Plan'}}" ] }, - "execution_count": 8, + "execution_count": 4, "metadata": {}, "output_type": "execute_result" } @@ -175,21 +187,21 @@ }, { "cell_type": "code", - "execution_count": 6, - "id": "0a2c5b71", + "execution_count": 5, + "id": "3790c6ca", "metadata": {}, "outputs": [ { "data": { "text/html": [ - "{'NodeMetadata': {'1x2': <function generate_transform_wrapper.<locals>.wrapper at 0x7f458137a670>, '2x1': <function generate_transform_wrapper.<locals>.wrapper at 0x7f45815fa160>}}" + "{'NodeMetadata': {'1x2': <function generate_transform_wrapper.<locals>.wrapper at 0x7f8d69d644c0>, '2x1': <function generate_transform_wrapper.<locals>.wrapper at 0x7f8d69d64700>}}" ], "text/plain": [ "{'NodeMetadata': {'1x2': .wrapper(self: syft.service.metadata.node_metadata.NodeMetadata, context: Union[syft.types.transforms.TransformContext, syft.service.context.NodeServiceContext, NoneType] = None) -> syft.service.metadata.node_metadata.NodeMetadataV2>,\n", " '2x1': .wrapper(self: syft.service.metadata.node_metadata.NodeMetadataV2, context: 
Union[syft.types.transforms.TransformContext, syft.service.context.NodeServiceContext, NoneType] = None) -> syft.service.metadata.node_metadata.NodeMetadata>}}" ] }, - "execution_count": 6, + "execution_count": 5, "metadata": {}, "output_type": "execute_result" } @@ -200,8 +212,8 @@ }, { "cell_type": "code", - "execution_count": 9, - "id": "a8631e38", + "execution_count": 6, + "id": "a4ec5311", "metadata": {}, "outputs": [], "source": [ @@ -222,18 +234,36 @@ }, { "cell_type": "code", - "execution_count": null, - "id": "c08ff2f1", + "execution_count": 7, + "id": "8bcf95b8", "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/markdown": [ + "```python\n", + "class NodeMetadata:\n", + " id: str = 55b9c387801541b4bd7e79a574ff60c4\n", + "\n", + "```" + ], + "text/plain": [ + "syft.service.metadata.node_metadata.NodeMetadata" + ] + }, + "execution_count": 7, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "sy.deserialize(sy.serialize(node_metadata_v1, to_bytes=True), from_bytes=True)" ] }, { "cell_type": "code", - "execution_count": 10, - "id": "88dd16e7", + "execution_count": 8, + "id": "a007db0f", "metadata": {}, "outputs": [], "source": [ @@ -254,8 +284,8 @@ }, { "cell_type": "code", - "execution_count": 19, - "id": "616829fb", + "execution_count": 9, + "id": "850e094a", "metadata": {}, "outputs": [ { @@ -263,7 +293,7 @@ "text/markdown": [ "```python\n", "class NodeMetadataV2:\n", - " id: str = fc1e8d7ba15a4cc4a1bfe18f0bc890ec\n", + " id: str = 42543d06d6624d3eba4960214d8d4b44\n", "\n", "```" ], @@ -271,7 +301,7 @@ "syft.service.metadata.node_metadata.NodeMetadataV2" ] }, - "execution_count": 19, + "execution_count": 9, "metadata": {}, "output_type": "execute_result" } @@ -282,7 +312,7 @@ }, { "cell_type": "markdown", - "id": "4a17c9c5", + "id": "c6e562a5", "metadata": {}, "source": [ "### Migrating to different versions" @@ -290,16 +320,8 @@ }, { "cell_type": "code", - "execution_count": null, - "id": "f5f20a44", - 
"metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": 15, - "id": "00b818fe", + "execution_count": 11, + "id": "793dd7b5", "metadata": {}, "outputs": [ { @@ -307,7 +329,7 @@ "text/markdown": [ "```python\n", "class NodeMetadataV2:\n", - " id: str = 5fc7c3b2d32546d49fa702f1d3c0a53b\n", + " id: str = 55b9c387801541b4bd7e79a574ff60c4\n", "\n", "```" ], @@ -315,7 +337,7 @@ "syft.service.metadata.node_metadata.NodeMetadataV2" ] }, - "execution_count": 15, + "execution_count": 11, "metadata": {}, "output_type": "execute_result" } @@ -326,8 +348,8 @@ }, { "cell_type": "code", - "execution_count": 16, - "id": "5d38e1fb", + "execution_count": 12, + "id": "b8b4d067", "metadata": {}, "outputs": [ { @@ -335,7 +357,7 @@ "text/markdown": [ "```python\n", "class NodeMetadata:\n", - " id: str = fc1e8d7ba15a4cc4a1bfe18f0bc890ec\n", + " id: str = 42543d06d6624d3eba4960214d8d4b44\n", "\n", "```" ], @@ -343,7 +365,7 @@ "syft.service.metadata.node_metadata.NodeMetadata" ] }, - "execution_count": 16, + "execution_count": 12, "metadata": {}, "output_type": "execute_result" } @@ -355,7 +377,7 @@ { "cell_type": "code", "execution_count": 22, - "id": "7b704dc7", + "id": "560dd0ca", "metadata": {}, "outputs": [], "source": [ @@ -365,7 +387,7 @@ { "cell_type": "code", "execution_count": 33, - "id": "790e6fd0", + "id": "3607e5bb", "metadata": {}, "outputs": [ { @@ -385,10 +407,10 @@ }, { "cell_type": "markdown", - "id": "6eefb927", + "id": "18e563aa", "metadata": {}, "source": [ - "### Migration Detection\n", + "### Migration Detection and Migrating Data\n", "\n", "SyftObjectMigrationState\n", "- canonical_name:\n", @@ -426,18 +448,18 @@ }, { "cell_type": "markdown", - "id": "dfb092f3", + "id": "ed0ae09e", "metadata": {}, "source": [ "### Server is running a different version from the client\n", "\n", - "- Client -> 0.8.3 -> UserV2\n", - "- Server -> 0.8.2 -> UserV1" + "- Client -> 0.8.2 -> UserV1\n", + "- Server -> 0.8.3 -> UserV2" ] }, { 
"cell_type": "markdown", - "id": "5bdf5c20", + "id": "8573a9e2", "metadata": {}, "source": [ "```python\n", @@ -458,14 +480,14 @@ " user: UserV1 = self.stash.set(create_user, context.credentials)\n", " \n", " # no need for any migrations from \n", - " # Viewable/Intermediate Objects like UserViewV1 to UserViewV2\n", + " # Viewable/Intermediate Objects like UserViewV2 to UserViewV1\n", " return user.migrate_to(client_version)\n", "```" ] }, { "cell_type": "markdown", - "id": "5bcf138c", + "id": "dbce0550", "metadata": {}, "source": [ "api.services.user.create(UserV2)\n", @@ -479,8 +501,8 @@ " - calculate the versions of the args and kwargs\n", " - store that in a dict\n", " - e.g. \n", - " kwargs = {\"user_create\": {\"UserCreate\": 2},}\n", - " return_annotation: {\"UserView\": [1, 2]}\n", + " kwargs = {\"user_create\": {\"UserCreate\": 1},}\n", + " return_annotation: {\"UserView\": [1]}\n", "```\n", "\n", "1. Client version is behind the server version\n", @@ -492,13 +514,14 @@ " \n", "Possible Solution: \n", " - We perform the migration transform on the client itself and serialize/deserde becomes easier.\n", - " - Server sends the ServeObjectVersionMap: {“User”: [1], …..{“NodeMetadata”: [1,2]..}}\n", + " - Server sends the ServerObjectVersionMap: {“User”: [1], …..{“NodeMetadata”: [1,2]..}}\n", " - Client identifies if server is running a lower version:\n", " - migrate to the supported version of object on client side\n", + " e.g. UserV2 (Client) on client side -> UserV2.migrate_to(version=1) -> send the data to the server. 
UserV1(server)\n", " - otherwise server takes care of the migration\n", " \n", "`\n", - "{“User”: [1], …..{“NodeMetadata”: [1,2]..}}\n", + " ServerObjectVersionMap = {“User”: [1], …..{“NodeMetadata”: [1,2]..}}\n", "\n", "`\n", "\n", @@ -508,7 +531,7 @@ { "cell_type": "code", "execution_count": null, - "id": "d5fb3651", + "id": "09741c6e", "metadata": {}, "outputs": [], "source": [] From 3a456fd2f3e67b68cd3fa1e4d89be26b138cd79a Mon Sep 17 00:00:00 2001 From: Shubham Gupta Date: Tue, 19 Sep 2023 13:56:18 +0530 Subject: [PATCH 16/67] protocol brainstroming --- notebooks/Experimental/Data Migration.ipynb | 236 ++++++++++++++++++++ 1 file changed, 236 insertions(+) diff --git a/notebooks/Experimental/Data Migration.ipynb b/notebooks/Experimental/Data Migration.ipynb index a46bf840400..cf0ae80b0e4 100644 --- a/notebooks/Experimental/Data Migration.ipynb +++ b/notebooks/Experimental/Data Migration.ipynb @@ -534,6 +534,242 @@ "id": "09741c6e", "metadata": {}, "outputs": [], + "source": [ + "class User:\n", + " __canonical_name__ = \"User\"\n", + " __version__ = 1\n", + " \n", + "class UserV2:\n", + " __canonical_name__ = \"User\"\n", + " __version__ = 2\n", + "\n", + "ProtocolVersionMap\n", + "{\n", + " 1: {“User”: 1, …., “NodeMetadata”: 1..},\n", + " 2: {“User”: 1, …., “NodeMetadata”: 3..},\n", + " 3: {“User”: 2, …., “NodeMetadata”: 3..},\n", + " 4: {\"NodeMetdata\": 3},\n", + "}\n", + "\n", + "server protocol versions -> [1, 2, 3, 4]\n", + "\n", + "Client\n", + "{\n", + " 1: {“User”: 1, …..“NodeMetadata”: 1..}, -> derived from the SyftMigrationRegistry\n", + " 2: {“User”: 1, …..“NodeMetadata”: 3..},\n", + "}\n", + "\n", + "client protocol versions -> [1, 2]\n", + "\n", + "\n", + "communication protocol: 2 -> Highest of the intersection of server and client\n", + " \n", + "\n", + "{\n", + " 4: {\"NodeMetdata\": 3},\n", + "}\n", + "\n", + "server protocol versions -> [4]\n", + "\n", + "Client\n", + "ProtocolVersionMap:\n", + "{\n", + " 1: {“User”: 1, …..“NodeMetadata”: 
1..}, -> derived from the SyftMigrationRegistry\n", + " 2: {“User”: 1, …..“NodeMetadata”: 3..},\n", + "}\n", + "\n", + "client protocol versions -> [1, 2]\n", + "\n", + "\n", + "communication protocol: No intersection -> Highest of the intersection of server and client\n", + " \n", + " \n", + "SyftMigrationRegistry:\n", + " - __migration_version_registry__: {\n", + " \"User\": {\"1\": \"...\", \"2\": \"....\"},\n", + " \"NodeMetadata\": {\"1\": \"...\", \"2\": \"....\", \"3\": \"....\"}\n", + " }\n", + " \n", + "\n", + "ProtocolVersionMap.lastest_object_map_hash -> efgdfaf\n", + " - {\"User\": 2, \"NodeMetadata\": 3} -> hash -> asdasfaf\n", + "\n", + " \n", + ">> syft upgrade protocol\n", + "\n", + "\n", + "\n", + "--->\n", + "migrations/01.py\n", + " - {\n", + " \"hash\": \"sdasrafa\",\n", + " \"object_versions\": {\"User\": 1, \"NodeMetadata\": 1},\n", + " \"protocol_version\": 1\n", + " }\n", + "migrations/02.py\n", + " - {\n", + " \"depends_on\": \"01.py\"\n", + " \"hash\": \"effasfa\",\n", + " \"object_versions\": {\"User\": 1, \"NodeMetadata\": 3},\n", + " \"protocol_version\": 2\n", + " }\n", + "\n", + "protocol_state.json\n", + "1: {\n", + " \"hash\": \"effasfa\"\n", + " \"object_versions\": {\"User\": 1, \"NodeMetadata\": 1},\n", + " \n", + "},\n", + "\n", + "2: {\n", + " \"hash\": \"12dsad4\",\n", + " \"object_versions\": {\"User\": 1, \"NodeMetadata\": 3},\n", + "},\n", + "\n", + "protocol_state.json\n", + "1: {\n", + " \"hash\": \"effasfa\"\n", + " \"object_versions\": {\"User\": 1, \"NodeMetadata\": [1]},\n", + " \"stale\": True\n", + "},\n", + "2: {\n", + " \"builds_on\": \n", + " \"hash\": \"12dsad4\",\n", + " \"object_versions\": {\"User\": 1, \"NodeMetadata\": [1, 2]},\n", + " \"stale\": True\n", + "},\n", + "3: {\n", + " \"builds_on\": \n", + " \"hash\": \"5235sad4\",\n", + " \"object_versions\": {\"User\": [1, 2], \"NodeMetadata\": [1,2,3]},\n", + " \"stale\": True\n", + "},\n", + "4: {\n", + " \"builds_on\": \n", + " \"hash\": \"5235sad4\",\n", 
+ " \"object_versions\": {\"User\": [1], \"NodeMetadata\": [1,2,3]},\n", + " \"stale\": True\n", + "}\n", + "5: {\n", + " \"hash\": \"asd23144\",\n", + " \"object_versions\": {\"NodeMetadata\": [1, 2, 3]},\n", + " \"stale\": False\n", + " \n", + "}\n", + " \n", + "client: [1, 2, 3, 4]\n", + "server: [5]\n", + "communication protocol:\n", + " No intersection -> Highest of the intersection of server and client\n", + "\n", + " \n", + "- a. When we remove an Object type altogether. e.g deleted user classes.\n", + "- b. When we remove a version of the Canonical Object. e.g. deleted version 1 of the User class.\n", + "\n", + "```\n", + "client or server:\n", + " - If the version of the object being send doesn't match the version of the object in given protocol\n", + " then we migrate to the given version\n", + "```" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "id": "9f5f75d0", + "metadata": {}, + "outputs": [ + { + "ename": "SyntaxError", + "evalue": "invalid syntax (2320814170.py, line 1)", + "output_type": "error", + "traceback": [ + "\u001b[0;36m Input \u001b[0;32mIn [1]\u001b[0;36m\u001b[0m\n\u001b[0;31m SyftMigrationRegistry:\u001b[0m\n\u001b[0m ^\u001b[0m\n\u001b[0;31mSyntaxError\u001b[0m\u001b[0;31m:\u001b[0m invalid syntax\n" + ] + } + ], + "source": [ + "SyftMigrationRegistry:\n", + " __migration_version_registry__ : {\n", + " \"User\": {1: \"\", 2: \"\"}\n", + " }\n", + "# >> sy.reset_protocol()\n", + ">> sy.update_protocol()\n", + "\n", + "\n", + "protocol_state.json\n", + "1: {\n", + " \"hash\": \"effasfa\"\n", + " \"object_versions\": {\"User\": [1], \"NodeMetadata\": [1]},\n", + " \"stale\": False\n", + "},\n", + "2: {\n", + " \n", + " \"hash\": \"12dsad4\",\n", + " \"object_versions\": {\"User\": [1], \"NodeMetadata\": [1, 2]},\n", + " \"stale\": False\n", + "},\n", + "3: {\n", + " \n", + " \"hash\": \"5235sad4\",\n", + " \"object_versions\": {\"User\": [1, 2], \"NodeMetadata\": [1,2,3]},\n", + " \"stale\": True\n", + "},\n", + "4: {\n", 
+ "\n", + " \"hash\": \"5235sad4\",\n", + " \"object_versions\": {\"User\": [1], \"NodeMetadata\": [1,2,3]},\n", + " \"stale\": False\n", + "}\n", + "5: {\n", + " \"hash\": \"13124214\",\n", + " \"object_versions\": {\"User\": [1, 2, 3], \"NodeMetadata\": [1, 2, 3]},\n", + " \"stale\": False\n", + " \n", + "}\n", + " \n", + "# client: [1, 2, 3, 4]\n", + "# server: [5]\n", + "# communication protocol:\n", + "# No intersection -> Highest of the intersection of server and client\n", + "\n", + "\"object_versions\": {\n", + " \"User\": {\n", + " 1: \"sadadefafa\", \n", + " 2: \"sdasd24124\",\n", + " 3: \"asdadasafhh\"\n", + " }\n", + "}\n", + "\n", + "# hash of the object:\n", + "# hash(\n", + "# \"__canonical_name__\", __version__,\n", + "# tuple(unique_keys), field_name and field type\n", + "# )" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "8bbf2188", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "6ee102ec", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "f5aeca48", + "metadata": {}, + "outputs": [], "source": [] } ], From 941dbad7b5c0c45e3f6faf2905b75e68b8c05965 Mon Sep 17 00:00:00 2001 From: Shubham Gupta Date: Wed, 20 Sep 2023 10:41:11 +0530 Subject: [PATCH 17/67] add context in notebook --- notebooks/Experimental/Data Migration.ipynb | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/notebooks/Experimental/Data Migration.ipynb b/notebooks/Experimental/Data Migration.ipynb index cf0ae80b0e4..5bf8e24ee11 100644 --- a/notebooks/Experimental/Data Migration.ipynb +++ b/notebooks/Experimental/Data Migration.ipynb @@ -676,7 +676,7 @@ { "cell_type": "code", "execution_count": 1, - "id": "9f5f75d0", + "id": "eecfc749", "metadata": {}, "outputs": [ { @@ -728,6 +728,11 @@ " \n", "}\n", " \n", + "# Field Stale is marked False by default. 
Its set to True:\n", + " - check if in the current object versions and previous object versions:\n", + " - if a version is missing for an object\n", + " - or the canonical name is altogether is missing.\n", + " \n", "# client: [1, 2, 3, 4]\n", "# server: [5]\n", "# communication protocol:\n", @@ -751,7 +756,7 @@ { "cell_type": "code", "execution_count": null, - "id": "8bbf2188", + "id": "fe2adf3f", "metadata": {}, "outputs": [], "source": [] @@ -759,7 +764,7 @@ { "cell_type": "code", "execution_count": null, - "id": "6ee102ec", + "id": "d5190d7d", "metadata": {}, "outputs": [], "source": [] @@ -767,7 +772,7 @@ { "cell_type": "code", "execution_count": null, - "id": "f5aeca48", + "id": "4ed6932c", "metadata": {}, "outputs": [], "source": [] From 80bb7750b13106a500bdedf80cf9f22848b74642 Mon Sep 17 00:00:00 2001 From: Shubham Gupta Date: Wed, 20 Sep 2023 16:39:00 +0530 Subject: [PATCH 18/67] add a class for managing and working with different data protocols add method to upgrade protocol generate protocol_state.json and protocol_state_dev.json --- packages/syft/src/syft/node/node.py | 5 +- .../syft/src/syft/protocol/data_protocol.py | 167 ++++++++++++++++++ .../src/syft/protocol/protocol_state.json | 124 +++++++++++++ .../src/syft/protocol/protocol_state_dev.json | 124 +++++++++++++ packages/syft/src/syft/util/util.py | 4 + 5 files changed, 420 insertions(+), 4 deletions(-) create mode 100644 packages/syft/src/syft/protocol/data_protocol.py create mode 100644 packages/syft/src/syft/protocol/protocol_state.json create mode 100644 packages/syft/src/syft/protocol/protocol_state_dev.json diff --git a/packages/syft/src/syft/node/node.py b/packages/syft/src/syft/node/node.py index b4584bf6a6f..ae4fd2004bc 100644 --- a/packages/syft/src/syft/node/node.py +++ b/packages/syft/src/syft/node/node.py @@ -96,6 +96,7 @@ from ..types.uid import UID from ..util.experimental_flags import flags from ..util.telemetry import instrument +from ..util.util import get_env from ..util.util 
import get_root_data_path from ..util.util import random_name from ..util.util import str_to_bool @@ -127,10 +128,6 @@ def gipc_decoder(obj_bytes): DEFAULT_ROOT_PASSWORD = "DEFAULT_ROOT_PASSWORD" # nosec -def get_env(key: str, default: Optional[Any] = None) -> Optional[str]: - return os.environ.get(key, default) - - def get_private_key_env() -> Optional[str]: return get_env(NODE_PRIVATE_KEY) diff --git a/packages/syft/src/syft/protocol/data_protocol.py b/packages/syft/src/syft/protocol/data_protocol.py new file mode 100644 index 00000000000..272c5705ab2 --- /dev/null +++ b/packages/syft/src/syft/protocol/data_protocol.py @@ -0,0 +1,167 @@ +# stdlib +import base64 +import hashlib +import json +import os +from pathlib import Path +from typing import Any +from typing import Dict + +# relative +from ..types.syft_object import SyftMigrationRegistry +from ..util.util import get_env +from ..util.util import str_to_bool + + +def get_dev_mode() -> bool: + return str_to_bool(get_env("DEV_MODE", "False")) + + +PROTOCOL_STATE_FILENAME = "protocol_state.json" +PROTOCOL_STATE_FILENAME_DEV = "protocol_state_dev.json" + + +def data_protocol_file_name(): + return PROTOCOL_STATE_FILENAME_DEV if get_dev_mode() else PROTOCOL_STATE_FILENAME + + +def data_protocol_dir(): + return os.path.abspath(str(Path(__file__).parent)) + + +def make_hash_sha256(obj_to_hash: Any) -> str: + def make_hashable(obj): + if isinstance(obj, (tuple, list)): + return tuple(make_hashable(e) for e in obj) + + if isinstance(obj, dict): + return tuple(sorted((k, make_hashable(v)) for k, v in obj.items())) + + if isinstance(obj, (set, frozenset)): + return tuple(sorted(make_hashable(e) for e in obj)) + + return obj + + hasher = hashlib.sha256() + hasher.update(repr(make_hashable(obj_to_hash)).encode()) + return base64.b64encode(hasher.digest()).decode() + + +class DataProtocol: + def __init__(self, filename: str) -> None: + self.file_path = Path(data_protocol_dir()) / filename + self.state = self.read_state() + + 
def calc_latest_object_versions(self): + object_latest_version_map = {} + object_versions = iter( + SyftMigrationRegistry.__migration_version_registry__.items() + ) + for canonical_name, available_versions in object_versions: + object_latest_version_map[canonical_name] = list(available_versions) + + return object_latest_version_map + + def read_state(self) -> Dict: + return json.loads(self.file_path.read_text()) + + def save_state(self): + self.file_path.write_text(json.dumps(self.state)) + + def find_deleted_versions( + self, + current_object_version_map: Dict, + new_object_version_map: Dict, + ): + deleted_object_classes = set(current_object_version_map) - set( + new_object_version_map + ) + + deleted_versions_map = {} + + for canonical_name, new_versions in new_object_version_map.items(): + current_versions = current_object_version_map.get(canonical_name) + if current_versions is None: + continue + + deleted_versions = list(set(current_versions) - set(new_versions)) + deleted_versions_map[canonical_name] = deleted_versions + + return deleted_object_classes, deleted_versions_map + + def compute_supported_protocol_states( + self, + current_protocol_version: int, + new_object_version_map: Dict, + ): + current_protocol_state = self.state[current_protocol_version] + deleted_object_classes, deleted_versions_map = self.find_deleted_versions( + current_protocol_state, + new_object_version_map=new_object_version_map, + ) + + for _, protocol_state in self.state.items(): + object_versions = protocol_state["object_versions"] + if protocol_state["supported"]: + continue + + # Check if any object class is deleted, + # then mark the protocol as not supported. 
+ is_unsupported = any( + object_class in object_versions + for object_class in deleted_object_classes + ) + if is_unsupported: + protocol_state["supported"] = False + continue + + for object_class, supported_versions in deleted_versions_map.items(): + available_versions = object_versions.get(object_class, []) + unsupported_versions_present = set(available_versions).intersection( + supported_versions + ) + if unsupported_versions_present: + is_unsupported = True + break + + if is_unsupported: + protocol_state["supported"] = False + + @property + def state_defined(self): + return len(self.state) > 0 + + +def upgrade_protocol(): + data_protocol = DataProtocol(filename=data_protocol_file_name()) + + object_version_map = data_protocol.calc_latest_object_versions() + new_protocol_hash = make_hash_sha256(object_version_map) + + if not data_protocol.state_defined: + new_protocol_version = 1 + else: + # Find the current version + current_protocol_version = sorted( + data_protocol.state.keys(), + reverse=True, + )[0] + + new_protocol_version = current_protocol_version + 1 + + current_protocol_state = data_protocol.state[current_protocol_version] + if current_protocol_state["hash"] == new_protocol_hash: + print("No change in schema. 
Skipping upgrade.") + return + + data_protocol.compute_supported_protocol_states( + current_protocol_version=current_protocol_version, + new_object_version_map=object_version_map, + ) + + data_protocol.state[new_protocol_version] = { + "object_versions": object_version_map, + "hash": new_protocol_hash, + "supported": True, + } + data_protocol.save_state() diff --git a/packages/syft/src/syft/protocol/protocol_state.json b/packages/syft/src/syft/protocol/protocol_state.json new file mode 100644 index 00000000000..cc95b47fb17 --- /dev/null +++ b/packages/syft/src/syft/protocol/protocol_state.json @@ -0,0 +1,124 @@ +{ + "1": { + "object_versions": { + "SyftObject": [1], + "PartialSyftObject": [1], + "NodeServiceContext": [1], + "AuthedServiceContext": [1], + "UnauthedServiceContext": [1], + "NodeMetadataUpdate": [1], + "NodeMetadata": [1, 2], + "LinkedObject": [1], + "NodeConnection": [1], + "APIEndpoint": [1], + "SignedSyftAPICall": [1], + "SyftAPICall": [1], + "SyftAPI": [1], + "User": [1], + "UserUpdate": [1], + "UserCreate": [1], + "UserSearch": [1], + "UserView": [1], + "UserViewPage": [1], + "UserPrivateKey": [1], + "NodeSettingsUpdate": [1], + "NodeSettings": [1], + "HTTPConnection": [1], + "PythonConnection": [1], + "DateTime": [1], + "BlobFile": [1], + "SecureFilePathLocation": [1], + "SeaweedSecureFilePathLocation": [1], + "BlobStorageEntry": [1], + "BlobStorageMetadata": [1], + "CreateBlobStorageEntry": [1], + "BlobRetrieval": [1], + "SyftObjectRetrieval": [1], + "BlobRetrievalByURL": [1], + "BlobDeposit": [1], + "WorkerSettings": [1], + "HTTPNodeRoute": [1], + "PythonNodeRoute": [1], + "EnclaveMetadata": [1], + "DataSubject": [1], + "DataSubjectCreate": [1], + "DataSubjectMemberRelationship": [1], + "Contributor": [1], + "MarkdownDescription": [1], + "Asset": [1], + "CreateAsset": [1], + "Dataset": [1], + "DatasetPageView": [1], + "CreateDataset": [1], + "ActionDataEmpty": [1], + "ActionFileData": [1], + "Action": [1], + "ActionObject": [1], + 
"AnyActionObject": [1], + "TwinObject": [1], + "Policy": [1], + "InputPolicy": [1], + "ExactMatch": [1], + "OutputHistory": [1], + "OutputPolicy": [1], + "OutputPolicyExecuteCount": [1], + "OutputPolicyExecuteOnce": [1], + "UserOutputPolicy": [1], + "UserInputPolicy": [1], + "UserPolicy": [1], + "SubmitUserPolicy": [1], + "UserCode": [1], + "SubmitUserCode": [1], + "UserCodeExecutionResult": [1], + "CodeHistory": [1], + "CodeHistoryView": [1], + "CodeHistoriesDict": [1], + "UsersCodeHistoriesDict": [1], + "NodePeer": [1], + "ProxyClient": [1], + "CommandReport": [1], + "CommandResult": [1], + "VPNClientConnection": [1], + "HeadscaleAuthToken": [1], + "TailscalePeer": [1], + "TailscaleStatus": [1], + "OnDiskBlobDeposit": [1], + "SeaweedFSBlobDeposit": [1], + "NumpyArrayObject": [1], + "NumpyScalarObject": [1], + "NumpyBoolObject": [1], + "PandasDataframeObject": [1], + "PandasSeriesObject": [1], + "ReplyNotification": [1], + "Notification": [1], + "CreateNotification": [1], + "Change": [1], + "ChangeStatus": [1], + "ActionStoreChange": [1], + "Request": [1], + "RequestInfo": [1], + "RequestInfoFilter": [1], + "SubmitRequest": [1], + "ObjectMutation": [1], + "EnumMutation": [1], + "UserCodeStatusChange": [1], + "ProjectEvent": [1], + "ProjectEventAddObject": [1], + "ProjectEventAddLink": [1], + "ProjectSubEvent": [1], + "ProjectThreadMessage": [1], + "ProjectMessage": [1], + "ProjectRequestResponse": [1], + "ProjectRequest": [1], + "AnswerProjectPoll": [1], + "ProjectPoll": [1], + "Project": [1], + "ProjectSubmit": [1], + "QueueItem": [1], + "ZMQClientConfig": [1], + "Plan": [1] + }, + "hash": "cywY0k3c9tIYHL3OnoiPEabwe2fyn6SvTxcG7zvwIPA=", + "supported": true + } +} diff --git a/packages/syft/src/syft/protocol/protocol_state_dev.json b/packages/syft/src/syft/protocol/protocol_state_dev.json new file mode 100644 index 00000000000..cc95b47fb17 --- /dev/null +++ b/packages/syft/src/syft/protocol/protocol_state_dev.json @@ -0,0 +1,124 @@ +{ + "1": { + "object_versions": 
{ + "SyftObject": [1], + "PartialSyftObject": [1], + "NodeServiceContext": [1], + "AuthedServiceContext": [1], + "UnauthedServiceContext": [1], + "NodeMetadataUpdate": [1], + "NodeMetadata": [1, 2], + "LinkedObject": [1], + "NodeConnection": [1], + "APIEndpoint": [1], + "SignedSyftAPICall": [1], + "SyftAPICall": [1], + "SyftAPI": [1], + "User": [1], + "UserUpdate": [1], + "UserCreate": [1], + "UserSearch": [1], + "UserView": [1], + "UserViewPage": [1], + "UserPrivateKey": [1], + "NodeSettingsUpdate": [1], + "NodeSettings": [1], + "HTTPConnection": [1], + "PythonConnection": [1], + "DateTime": [1], + "BlobFile": [1], + "SecureFilePathLocation": [1], + "SeaweedSecureFilePathLocation": [1], + "BlobStorageEntry": [1], + "BlobStorageMetadata": [1], + "CreateBlobStorageEntry": [1], + "BlobRetrieval": [1], + "SyftObjectRetrieval": [1], + "BlobRetrievalByURL": [1], + "BlobDeposit": [1], + "WorkerSettings": [1], + "HTTPNodeRoute": [1], + "PythonNodeRoute": [1], + "EnclaveMetadata": [1], + "DataSubject": [1], + "DataSubjectCreate": [1], + "DataSubjectMemberRelationship": [1], + "Contributor": [1], + "MarkdownDescription": [1], + "Asset": [1], + "CreateAsset": [1], + "Dataset": [1], + "DatasetPageView": [1], + "CreateDataset": [1], + "ActionDataEmpty": [1], + "ActionFileData": [1], + "Action": [1], + "ActionObject": [1], + "AnyActionObject": [1], + "TwinObject": [1], + "Policy": [1], + "InputPolicy": [1], + "ExactMatch": [1], + "OutputHistory": [1], + "OutputPolicy": [1], + "OutputPolicyExecuteCount": [1], + "OutputPolicyExecuteOnce": [1], + "UserOutputPolicy": [1], + "UserInputPolicy": [1], + "UserPolicy": [1], + "SubmitUserPolicy": [1], + "UserCode": [1], + "SubmitUserCode": [1], + "UserCodeExecutionResult": [1], + "CodeHistory": [1], + "CodeHistoryView": [1], + "CodeHistoriesDict": [1], + "UsersCodeHistoriesDict": [1], + "NodePeer": [1], + "ProxyClient": [1], + "CommandReport": [1], + "CommandResult": [1], + "VPNClientConnection": [1], + "HeadscaleAuthToken": [1], + 
"TailscalePeer": [1], + "TailscaleStatus": [1], + "OnDiskBlobDeposit": [1], + "SeaweedFSBlobDeposit": [1], + "NumpyArrayObject": [1], + "NumpyScalarObject": [1], + "NumpyBoolObject": [1], + "PandasDataframeObject": [1], + "PandasSeriesObject": [1], + "ReplyNotification": [1], + "Notification": [1], + "CreateNotification": [1], + "Change": [1], + "ChangeStatus": [1], + "ActionStoreChange": [1], + "Request": [1], + "RequestInfo": [1], + "RequestInfoFilter": [1], + "SubmitRequest": [1], + "ObjectMutation": [1], + "EnumMutation": [1], + "UserCodeStatusChange": [1], + "ProjectEvent": [1], + "ProjectEventAddObject": [1], + "ProjectEventAddLink": [1], + "ProjectSubEvent": [1], + "ProjectThreadMessage": [1], + "ProjectMessage": [1], + "ProjectRequestResponse": [1], + "ProjectRequest": [1], + "AnswerProjectPoll": [1], + "ProjectPoll": [1], + "Project": [1], + "ProjectSubmit": [1], + "QueueItem": [1], + "ZMQClientConfig": [1], + "Plan": [1] + }, + "hash": "cywY0k3c9tIYHL3OnoiPEabwe2fyn6SvTxcG7zvwIPA=", + "supported": true + } +} diff --git a/packages/syft/src/syft/util/util.py b/packages/syft/src/syft/util/util.py index 8f76f6de5be..061a9ac9ad5 100644 --- a/packages/syft/src/syft/util/util.py +++ b/packages/syft/src/syft/util/util.py @@ -52,6 +52,10 @@ PANDAS_DATA = f"{DATASETS_URL}/pandas_cookbook" +def get_env(key: str, default: Optional[Any] = None) -> Optional[str]: + return os.environ.get(key, default) + + def full_name_with_qualname(klass: type) -> str: """Returns the klass module name + klass qualname.""" try: From 7c5477354503ef4dda137ddaffc12250d817aca7 Mon Sep 17 00:00:00 2001 From: Shubham Gupta Date: Wed, 20 Sep 2023 18:07:35 +0530 Subject: [PATCH 19/67] update protocol logic to save object hash as part of data protocol --- .../syft/src/syft/protocol/data_protocol.py | 113 +++--- .../src/syft/protocol/protocol_state.json | 125 +------ .../src/syft/protocol/protocol_state_dev.json | 345 ++++++++++++------ 3 files changed, 299 insertions(+), 284 deletions(-) diff 
--git a/packages/syft/src/syft/protocol/data_protocol.py b/packages/syft/src/syft/protocol/data_protocol.py index 272c5705ab2..b966b5d8817 100644 --- a/packages/syft/src/syft/protocol/data_protocol.py +++ b/packages/syft/src/syft/protocol/data_protocol.py @@ -6,10 +6,13 @@ from pathlib import Path from typing import Any from typing import Dict +from typing import Type # relative +from ..types.syft_object import SyftBaseObject from ..types.syft_object import SyftMigrationRegistry from ..util.util import get_env +from ..util.util import index_syft_by_module_name from ..util.util import str_to_bool @@ -52,13 +55,28 @@ def __init__(self, filename: str) -> None: self.file_path = Path(data_protocol_dir()) / filename self.state = self.read_state() + @staticmethod + def _calculate_object_hash(klass: Type[SyftBaseObject]) -> str: + obj_meta_info = { + "canonical_name": klass.__canonical_name__, + "version": klass.__version__, + "unique_keys": getattr(klass, "__attr_unique__", []), + "field_data": klass.__fields__, + } + + return make_hash_sha256(obj_meta_info) + def calc_latest_object_versions(self): object_latest_version_map = {} - object_versions = iter( - SyftMigrationRegistry.__migration_version_registry__.items() - ) - for canonical_name, available_versions in object_versions: - object_latest_version_map[canonical_name] = list(available_versions) + migration_registry = SyftMigrationRegistry.__migration_version_registry__ + for canonical_name in migration_registry: + available_versions = migration_registry[canonical_name] + version_obj_hash_map = {} + for object_version, fqn in available_versions.items(): + object_klass = index_syft_by_module_name(fqn) + object_hash = self._calculate_object_hash(object_klass) + version_obj_hash_map[object_version] = object_hash + object_latest_version_map[canonical_name] = version_obj_hash_map return object_latest_version_map @@ -70,17 +88,20 @@ def save_state(self): def find_deleted_versions( self, - current_object_version_map: Dict, - 
new_object_version_map: Dict, + current_object_to_version_map: Dict, + new_object_to_version_map: Dict, ): - deleted_object_classes = set(current_object_version_map) - set( - new_object_version_map + deleted_object_classes = set(current_object_to_version_map) - set( + new_object_to_version_map ) deleted_versions_map = {} - for canonical_name, new_versions in new_object_version_map.items(): - current_versions = current_object_version_map.get(canonical_name) + for canonical_name, new_versions in new_object_to_version_map.items(): + current_versions = current_object_to_version_map.get( + canonical_name, + None, + ) if current_versions is None: continue @@ -89,15 +110,15 @@ def find_deleted_versions( return deleted_object_classes, deleted_versions_map - def compute_supported_protocol_states( + def recompute_supported_states( self, current_protocol_version: int, - new_object_version_map: Dict, + new_object_to_version_map: Dict, ): current_protocol_state = self.state[current_protocol_version] deleted_object_classes, deleted_versions_map = self.find_deleted_versions( current_protocol_state, - new_object_version_map=new_object_version_map, + new_object_to_version_map=new_object_to_version_map, ) for _, protocol_state in self.state.items(): @@ -131,37 +152,43 @@ def compute_supported_protocol_states( def state_defined(self): return len(self.state) > 0 + def upgrade(self): + object_to_version_map = self.calc_latest_object_versions() + new_protocol_hash = make_hash_sha256(object_to_version_map) + + if not self.state_defined: + new_protocol_version = 1 + else: + # Find the current version + current_protocol_version = sorted( + self.state.keys(), + reverse=True, + )[0] + + new_protocol_version = current_protocol_version + 1 + + current_protocol_state = self.state[current_protocol_version] + if current_protocol_state["hash"] == new_protocol_hash: + print("No change in schema. 
Skipping upgrade.") + return + + self.recompute_supported_states( + current_protocol_version=current_protocol_version, + new_object_to_version_map=object_to_version_map, + ) -def upgrade_protocol(): - data_protocol = DataProtocol(filename=data_protocol_file_name()) - - object_version_map = data_protocol.calc_latest_object_versions() - new_protocol_hash = make_hash_sha256(object_version_map) - - if not data_protocol.state_defined: - new_protocol_version = 1 - else: - # Find the current version - current_protocol_version = sorted( - data_protocol.state.keys(), - reverse=True, - )[0] + self.state[new_protocol_version] = { + "object_versions": object_to_version_map, + "hash": new_protocol_hash, + "supported": True, + } + self.save_state() - new_protocol_version = current_protocol_version + 1 - current_protocol_state = data_protocol.state[current_protocol_version] - if current_protocol_state["hash"] == new_protocol_hash: - print("No change in schema. Skipping upgrade.") - return +def upgrade_protocol(): + data_protocol = DataProtocol(filename=data_protocol_file_name()) + data_protocol.upgrade() - data_protocol.compute_supported_protocol_states( - current_protocol_version=current_protocol_version, - new_object_version_map=object_version_map, - ) - data_protocol.state[new_protocol_version] = { - "object_versions": object_version_map, - "hash": new_protocol_hash, - "supported": True, - } - data_protocol.save_state() +def validate_protocol(): + pass diff --git a/packages/syft/src/syft/protocol/protocol_state.json b/packages/syft/src/syft/protocol/protocol_state.json index cc95b47fb17..0967ef424bc 100644 --- a/packages/syft/src/syft/protocol/protocol_state.json +++ b/packages/syft/src/syft/protocol/protocol_state.json @@ -1,124 +1 @@ -{ - "1": { - "object_versions": { - "SyftObject": [1], - "PartialSyftObject": [1], - "NodeServiceContext": [1], - "AuthedServiceContext": [1], - "UnauthedServiceContext": [1], - "NodeMetadataUpdate": [1], - "NodeMetadata": [1, 2], - 
"LinkedObject": [1], - "NodeConnection": [1], - "APIEndpoint": [1], - "SignedSyftAPICall": [1], - "SyftAPICall": [1], - "SyftAPI": [1], - "User": [1], - "UserUpdate": [1], - "UserCreate": [1], - "UserSearch": [1], - "UserView": [1], - "UserViewPage": [1], - "UserPrivateKey": [1], - "NodeSettingsUpdate": [1], - "NodeSettings": [1], - "HTTPConnection": [1], - "PythonConnection": [1], - "DateTime": [1], - "BlobFile": [1], - "SecureFilePathLocation": [1], - "SeaweedSecureFilePathLocation": [1], - "BlobStorageEntry": [1], - "BlobStorageMetadata": [1], - "CreateBlobStorageEntry": [1], - "BlobRetrieval": [1], - "SyftObjectRetrieval": [1], - "BlobRetrievalByURL": [1], - "BlobDeposit": [1], - "WorkerSettings": [1], - "HTTPNodeRoute": [1], - "PythonNodeRoute": [1], - "EnclaveMetadata": [1], - "DataSubject": [1], - "DataSubjectCreate": [1], - "DataSubjectMemberRelationship": [1], - "Contributor": [1], - "MarkdownDescription": [1], - "Asset": [1], - "CreateAsset": [1], - "Dataset": [1], - "DatasetPageView": [1], - "CreateDataset": [1], - "ActionDataEmpty": [1], - "ActionFileData": [1], - "Action": [1], - "ActionObject": [1], - "AnyActionObject": [1], - "TwinObject": [1], - "Policy": [1], - "InputPolicy": [1], - "ExactMatch": [1], - "OutputHistory": [1], - "OutputPolicy": [1], - "OutputPolicyExecuteCount": [1], - "OutputPolicyExecuteOnce": [1], - "UserOutputPolicy": [1], - "UserInputPolicy": [1], - "UserPolicy": [1], - "SubmitUserPolicy": [1], - "UserCode": [1], - "SubmitUserCode": [1], - "UserCodeExecutionResult": [1], - "CodeHistory": [1], - "CodeHistoryView": [1], - "CodeHistoriesDict": [1], - "UsersCodeHistoriesDict": [1], - "NodePeer": [1], - "ProxyClient": [1], - "CommandReport": [1], - "CommandResult": [1], - "VPNClientConnection": [1], - "HeadscaleAuthToken": [1], - "TailscalePeer": [1], - "TailscaleStatus": [1], - "OnDiskBlobDeposit": [1], - "SeaweedFSBlobDeposit": [1], - "NumpyArrayObject": [1], - "NumpyScalarObject": [1], - "NumpyBoolObject": [1], - 
"PandasDataframeObject": [1], - "PandasSeriesObject": [1], - "ReplyNotification": [1], - "Notification": [1], - "CreateNotification": [1], - "Change": [1], - "ChangeStatus": [1], - "ActionStoreChange": [1], - "Request": [1], - "RequestInfo": [1], - "RequestInfoFilter": [1], - "SubmitRequest": [1], - "ObjectMutation": [1], - "EnumMutation": [1], - "UserCodeStatusChange": [1], - "ProjectEvent": [1], - "ProjectEventAddObject": [1], - "ProjectEventAddLink": [1], - "ProjectSubEvent": [1], - "ProjectThreadMessage": [1], - "ProjectMessage": [1], - "ProjectRequestResponse": [1], - "ProjectRequest": [1], - "AnswerProjectPoll": [1], - "ProjectPoll": [1], - "Project": [1], - "ProjectSubmit": [1], - "QueueItem": [1], - "ZMQClientConfig": [1], - "Plan": [1] - }, - "hash": "cywY0k3c9tIYHL3OnoiPEabwe2fyn6SvTxcG7zvwIPA=", - "supported": true - } -} +{} diff --git a/packages/syft/src/syft/protocol/protocol_state_dev.json b/packages/syft/src/syft/protocol/protocol_state_dev.json index cc95b47fb17..9900aae042d 100644 --- a/packages/syft/src/syft/protocol/protocol_state_dev.json +++ b/packages/syft/src/syft/protocol/protocol_state_dev.json @@ -1,124 +1,235 @@ { "1": { "object_versions": { - "SyftObject": [1], - "PartialSyftObject": [1], - "NodeServiceContext": [1], - "AuthedServiceContext": [1], - "UnauthedServiceContext": [1], - "NodeMetadataUpdate": [1], - "NodeMetadata": [1, 2], - "LinkedObject": [1], - "NodeConnection": [1], - "APIEndpoint": [1], - "SignedSyftAPICall": [1], - "SyftAPICall": [1], - "SyftAPI": [1], - "User": [1], - "UserUpdate": [1], - "UserCreate": [1], - "UserSearch": [1], - "UserView": [1], - "UserViewPage": [1], - "UserPrivateKey": [1], - "NodeSettingsUpdate": [1], - "NodeSettings": [1], - "HTTPConnection": [1], - "PythonConnection": [1], - "DateTime": [1], - "BlobFile": [1], - "SecureFilePathLocation": [1], - "SeaweedSecureFilePathLocation": [1], - "BlobStorageEntry": [1], - "BlobStorageMetadata": [1], - "CreateBlobStorageEntry": [1], - "BlobRetrieval": [1], - 
"SyftObjectRetrieval": [1], - "BlobRetrievalByURL": [1], - "BlobDeposit": [1], - "WorkerSettings": [1], - "HTTPNodeRoute": [1], - "PythonNodeRoute": [1], - "EnclaveMetadata": [1], - "DataSubject": [1], - "DataSubjectCreate": [1], - "DataSubjectMemberRelationship": [1], - "Contributor": [1], - "MarkdownDescription": [1], - "Asset": [1], - "CreateAsset": [1], - "Dataset": [1], - "DatasetPageView": [1], - "CreateDataset": [1], - "ActionDataEmpty": [1], - "ActionFileData": [1], - "Action": [1], - "ActionObject": [1], - "AnyActionObject": [1], - "TwinObject": [1], - "Policy": [1], - "InputPolicy": [1], - "ExactMatch": [1], - "OutputHistory": [1], - "OutputPolicy": [1], - "OutputPolicyExecuteCount": [1], - "OutputPolicyExecuteOnce": [1], - "UserOutputPolicy": [1], - "UserInputPolicy": [1], - "UserPolicy": [1], - "SubmitUserPolicy": [1], - "UserCode": [1], - "SubmitUserCode": [1], - "UserCodeExecutionResult": [1], - "CodeHistory": [1], - "CodeHistoryView": [1], - "CodeHistoriesDict": [1], - "UsersCodeHistoriesDict": [1], - "NodePeer": [1], - "ProxyClient": [1], - "CommandReport": [1], - "CommandResult": [1], - "VPNClientConnection": [1], - "HeadscaleAuthToken": [1], - "TailscalePeer": [1], - "TailscaleStatus": [1], - "OnDiskBlobDeposit": [1], - "SeaweedFSBlobDeposit": [1], - "NumpyArrayObject": [1], - "NumpyScalarObject": [1], - "NumpyBoolObject": [1], - "PandasDataframeObject": [1], - "PandasSeriesObject": [1], - "ReplyNotification": [1], - "Notification": [1], - "CreateNotification": [1], - "Change": [1], - "ChangeStatus": [1], - "ActionStoreChange": [1], - "Request": [1], - "RequestInfo": [1], - "RequestInfoFilter": [1], - "SubmitRequest": [1], - "ObjectMutation": [1], - "EnumMutation": [1], - "UserCodeStatusChange": [1], - "ProjectEvent": [1], - "ProjectEventAddObject": [1], - "ProjectEventAddLink": [1], - "ProjectSubEvent": [1], - "ProjectThreadMessage": [1], - "ProjectMessage": [1], - "ProjectRequestResponse": [1], - "ProjectRequest": [1], - "AnswerProjectPoll": 
[1], - "ProjectPoll": [1], - "Project": [1], - "ProjectSubmit": [1], - "QueueItem": [1], - "ZMQClientConfig": [1], - "Plan": [1] + "SyftObject": { "1": "YKNSMz5Mv3JP+agzpfG7//NdOC921hw08vgXOKTZhuQ=" }, + "PartialSyftObject": { + "1": "Y4P+AvGCmidZWfH5E3h6mAZybWgzR8Qo8yH/YKE6tBk=" + }, + "NodeServiceContext": { + "1": "4jBsFFBIU1k9b6Wg9Ts3QSGpaWcxuTBdoB08HIRziJE=" + }, + "AuthedServiceContext": { + "1": "ROZLMgphlmwrzdaqVp2FQy9VMlMUQoTwYG/I8cu6zYc=" + }, + "UnauthedServiceContext": { + "1": "nTnxoAksWxKO1hJ2VkAaDEhO/5S5wmtQ6MqkAnFhVqg=" + }, + "NodeMetadataUpdate": { + "1": "0uJFncAR+d7xTMmqUa5liNFwG9vuVVFBvmMYCmf520c=" + }, + "NodeMetadata": { + "1": "2jeTbKyEk9d9/aJGbDa7XhO2Va+AwvxXYRUhWmvhCa0=", + "2": "9j+mjonkZIHWI8QKqk8cze7fEYPYT2thN0e8kEsTN4w=" + }, + "LinkedObject": { "1": "xumvQkRL81oCIw7pCoImBypTc6NmGgAqYf5rfNC+JTk=" }, + "NodeConnection": { "1": "YdSexoenQRB80O6ISJaj+KLnrZjV6yFtMa2WnGQZK/s=" }, + "APIEndpoint": { "1": "GgbLF9vHJqkCQUwuzBUloG0uQsT6PWS3qgBk40VkC88=" }, + "SignedSyftAPICall": { + "1": "0n2Jmh7dLyoh81FfxK0gIKkp+4Amu4Hrb5qi5XcPD+E=" + }, + "SyftAPICall": { "1": "mBNhjZ9Smqkzk85RfCzt4/LAFLYn+AnL9OwFuqZRwvc=" }, + "SyftAPI": { "1": "kqlElFZum0tnAimmNIBPJQH/XdY4FQX3zOTrhfLrLu4=" }, + "User": { "1": "J+VKXh3/Vr+VrTKFqvyLNBS+ZstjMUKp+HZ/1Xj8ZE4=" }, + "UserUpdate": { "1": "+i3TS7vvVcH8qv9pk6693z+I7UuHBzw+2aHHoN/nMeg=" }, + "UserCreate": { "1": "Qb/uv7aNiSV8JCc/4kpu/TUrINoi/VyHItZR0qtj018=" }, + "UserSearch": { "1": "p3vPAxBqKkJHRrxwjbN1w3pVDydRnpGgC01SARK1xFE=" }, + "UserView": { "1": "jiwWOsYbk4lT3ojZHhWSpPNy+AC89Am18QWHmgB8+lE=" }, + "UserViewPage": { "1": "sTFHQd1NgnBqHo+v9y8Y4JFEYEOd/E9coyrJh8/CEQY=" }, + "UserPrivateKey": { "1": "go5Yai/AhgXdpujV3Rpp4ktWbqPPZNw5dnfpdp4dbzs=" }, + "NodeSettingsUpdate": { + "1": "r8UPbVHmnELUYMLpHITOFQUfGBscyN8eW2zQUew/heU=" + }, + "NodeSettings": { "1": "PG1lzAKRE9xcqun3oA/UQijbmP3Ky4y+W/9usVlsKH0=" }, + "HTTPConnection": { "1": "g0IjjFIhpLSaLQRGAmsKnsf6WnCQGBkn2I7RTWSCMhE=" }, + "PythonConnection": { + "1": 
"zp0njyhyHxPyRGIf8NHInpAxBl0heefw99lja1HktIQ=" + }, + "DateTime": { "1": "lgw5W+3uvTF7+Yf90TCm/ZH14sdQok//Fg1od3MEYHo=" }, + "BlobFile": { "1": "eMGHm8RwoxIYgApFjBQbIk10OHZzzg8iYueZuTHW3dA=" }, + "SecureFilePathLocation": { + "1": "RQ/ecdme/+BlMKeDzP13I5COZWE6Y9Z/Qa069pAjhB4=" + }, + "SeaweedSecureFilePathLocation": { + "1": "eUPv14r+G8ahuTxPyCRwGJohZszoVcIwTQwP9Ka1erQ=" + }, + "BlobStorageEntry": { + "1": "P5zOHxfAah/oDSI30EmwitHqr0AH6SFwblIqP12WwWI=" + }, + "BlobStorageMetadata": { + "1": "tRxxFD0aFQZIrFSwOvnKfgfi86XPe/jGbxIoERM3ORY=" + }, + "CreateBlobStorageEntry": { + "1": "rS/eIit7drIV90BwN+1+C7g6d84iuJWVTcnZlk4nxf8=" + }, + "BlobRetrieval": { "1": "CHBUBC0ui9KBmycpBb/ajpPjxx+4Tk996wp/AhL19hM=" }, + "SyftObjectRetrieval": { + "1": "oXaUpeMn3ZDdsCCA36TPvA0Gl1PhwYzLl83EqF8bnqw=" + }, + "BlobRetrievalByURL": { + "1": "Tulc9im06HsDR5nk7IQJA9dOjfMzrguhmccfINfY3Ew=" + }, + "BlobDeposit": { "1": "hzeQOsl5CLNJP3KqqVYjOtEmHIJEr3aOvC2a0OmasRA=" }, + "WorkerSettings": { "1": "lO0IIgJ6xd6dIoQ9NNOhzN1q1jCdJTmF1TPQyklsSt4=" }, + "HTTPNodeRoute": { "1": "VvXpjwKl7gnJ/Fp0k+Mpx9V9fzc4NVg3/jOh4+RF3PA=" }, + "PythonNodeRoute": { + "1": "e6ootD2xmyLa4gAlvUKMhi1s3Ga9ABr3aJm23zuxY3g=" + }, + "EnclaveMetadata": { + "1": "K6RyqYOV77ZPwi954HfrBdVBIuy8KoSKfr6U46CYa7A=" + }, + "DataSubject": { "1": "vb0kWLbURoD1CYVw58ZaltGfnAIlIXp7K7tj5gsVvyQ=" }, + "DataSubjectCreate": { + "1": "FZv2MaKkyJ9EZyzlx9ZP3+8MPFDE+G5aigv5ZMSXQks=" + }, + "DataSubjectMemberRelationship": { + "1": "VgQw34XsTIAqvfAv/P7OLwbl5dfz+8AookotabP8Yj0=" + }, + "Contributor": { "1": "B51N99UvLVA37uLKi7S+otHIvdtNpAc1nYHmwT+m/8I=" }, + "MarkdownDescription": { + "1": "Ey+ZtO5lR9DuiCm1Lzv6xnoy79shk5JclAY7HE+m1/w=" + }, + "Asset": { "1": "86T/V6DCjviie/Z7r0KrN2TKOTnY4cWeM458BlFU6/w=" }, + "CreateAsset": { "1": "LE0rScoj6BrnJspw4MFIi/GgHRwY4dp8O3rWfdBP+Vo=" }, + "Dataset": { "1": "Is1rUilFO3jnSEr2x8iYBm7b7tAbBN8Z8lbi43MECmA=" }, + "DatasetPageView": { + "1": "CuPRePPl6YiUGG10xv7+05QOqw5C5t8pgg2sPl3bwSk=" + }, + 
"CreateDataset": { "1": "Y4LSVY63dtpFuOh9DXlCyb9GsHQSAnuqd2Z+49rRbNw=" }, + "ActionDataEmpty": { + "1": "bgFSnXAP5Y5MDp2m12KmAYz3BYizQbQM6bt8+SV0nyo=" + }, + "ActionFileData": { "1": "pz2pLlCxo+YOKV/IvUGj9OE8iq7ioM+jh01yjxkoA7k=" }, + "Action": { "1": "zDzMNUSHGZl4vb83VNC2rWbJDZr3Bv+4IDH4TZ0A/Fc=" }, + "ActionObject": { "1": "LtfQw1cBX2hw/jYCKi9KqbBjTHLAqQPSWlnKEEBfCio=" }, + "AnyActionObject": { + "1": "fzK4maUmKiy4S2rV0vZ7VqALDXfprO8QauGZF2oOWLY=" + }, + "TwinObject": { "1": "46fPSjqUeIm1X0AEw1s/t6do9ZiZfNblXExnMP6PdYA=" }, + "Policy": { "1": "Fm9nQqoJ5a8mDsClFHjRtxUEEZfKnukmyKOwk8GGiOo=" }, + "InputPolicy": { "1": "xGLFa6azUOLtDZhhHtg0AEQKZodjDqMmkFSPXGyioHk=" }, + "ExactMatch": { "1": "Ryz1Z/oaFkpVghaVhWITouMQc5SaBnV8vdy/biMzcM0=" }, + "OutputHistory": { "1": "Aooo3q0QwZC9ITD5uPfryjzXN7cDqHaI3CpuYbP6qKc=" }, + "OutputPolicy": { "1": "EVnZO8ANUo9xD8Ms00rK17cQlbPS4wetsLUdNVHNW1Y=" }, + "OutputPolicyExecuteCount": { + "1": "tv9+lELPKGymvNsIeuqYcLfh4dcqnOzJt6cp9V5HZUo=" + }, + "OutputPolicyExecuteOnce": { + "1": "42Ag72zfZGUtRhD69GHTt3Ao17ONWAZv8GUtTYFVamk=" + }, + "UserOutputPolicy": { + "1": "ez36vPaSC0DN+cRGXyI/1IycwNhfWr4MI6WsPYh/+1M=" + }, + "UserInputPolicy": { + "1": "RGI6BmZqb0HjouCRLVoP0oTy+9CjfBuLiV3RmC4eyRk=" + }, + "UserPolicy": { "1": "MSC0EODy/AABI6A4KoRYAI8wll1lCPLIrspm/vgLbx8=" }, + "SubmitUserPolicy": { + "1": "OvMf5QQ54LApN/3OTihJ8sXXVt1Ob8F5I54Agpy6E0I=" + }, + "UserCode": { "1": "zflEdjmNBSo313bbafO72GwSi8oef6K1FeTmUvfhDFQ=" }, + "SubmitUserCode": { "1": "ffMpFbS2ORw8DVKaQAo2Qgax38XeW0IcNSa0KRq6IUg=" }, + "UserCodeExecutionResult": { + "1": "/lOU4pxbpyrCchNiXWbpUfznhAZdUFDvrn28aydxwEs=" + }, + "CodeHistory": { "1": "7I4gbWRYN3FY5uXB9oigPE/JIvn+ae2e4W105F48QEo=" }, + "CodeHistoryView": { + "1": "SrcXTEpiVftCmmK4FdNxrAXvT7GHDC0RoitwrJ2vUXQ=" + }, + "CodeHistoriesDict": { + "1": "UrL8zC+enllTYs+i/yfkjA1jhIRKYDI2CnBPAvlb228=" + }, + "UsersCodeHistoriesDict": { + "1": "3wfodj9HAWVhzHtxWBWVcFSA+bX7XO8Uqld3Is/RwtY=" + }, + "NodePeer": { "1": 
"v79RAuN61iLzNFaTxKhRSrMq3AMT6GzOBTBHxjc5er8=" }, + "ProxyClient": { "1": "PpgG6U72PgZ6zOzvJG4/hGzXYZZPsZVb9lQbDhdgM3A=" }, + "CommandReport": { "1": "3IokYf9jUF8uFIsUyPdCg2GoypLzCU0Im1fSPysVM10=" }, + "CommandResult": { "1": "e3l3u85Fe3Dnu1kPpTd38VjrGDCZN47IFo5iDStnJv0=" }, + "VPNClientConnection": { + "1": "HWV84pQiRBpeb+lcvIxWn+5D2ySto4bNOGX21WqfJTg=" + }, + "HeadscaleAuthToken": { + "1": "x+6INRwt6luuqI2d1NYyMQMMXjBovQ9+bkZiffFbNEQ=" + }, + "TailscalePeer": { "1": "vCubjOmd0lpfC9CT4+MV1DGAg2nDzUZ29zAaawr6p9A=" }, + "TailscaleStatus": { + "1": "br3ZMac7cI4n7pxD5S7lOpZ/2uoW8rFi2MmpDbgcGAU=" + }, + "OnDiskBlobDeposit": { + "1": "jjJZl1vrOaO2CzBzP20s4D/q0Qe8PumsiyVa/FL2pqE=" + }, + "SeaweedFSBlobDeposit": { + "1": "4bRLAUv2PMXdGnt/7yPp9MuZCMGKhq6qQ8MZbOkHH2I=" + }, + "NumpyArrayObject": { + "1": "3zbjWk0zX9PxbURLX3ShoVgCQOI81bfjYhIdaZ7HuMU=" + }, + "NumpyScalarObject": { + "1": "IYG6Da3/CLgz0gerP9Ci2yJrjWONdPhNrFf8wOeZ/uA=" + }, + "NumpyBoolObject": { + "1": "YIJ2Xmk7JnkGHAynY6HYCXOoKHJ9EYq7CZjNa3YdOWw=" + }, + "PandasDataframeObject": { + "1": "uX4KTvRFIAlVwHfo3ZN34KSxf4MknVT8PTwQeMQJWrU=" + }, + "PandasSeriesObject": { + "1": "/fq5bJw6oiyJHgDMx8TBCSA2mZZBfBWcVbSRJfA/cmA=" + }, + "ReplyNotification": { + "1": "Efh1C41x/1EtwQZFRMa6UpvxNZtykQjnqNNGenpmDUE=" + }, + "Notification": { "1": "wUCtMuph8YS2oGIgPUi5v+d1KDW8YZA+/E1sV01Lyd8=" }, + "CreateNotification": { + "1": "ROov4hDVjA+Z95CXBmiLbQxyVqmEClDP3UqOfw6PmzY=" + }, + "Change": { "1": "NZWmhd9R8EefaKi+TTYYyLmBrOm3O1Yct9KHGliFDYo=" }, + "ChangeStatus": { "1": "AfAOdUeFuiQ+aDGNchAJdndMMDCKJZPm3SQo0jxMYAE=" }, + "ActionStoreChange": { + "1": "5OP7I4PrGl14pve4o2XZHNbYIcukfZnm1wQ1uLKZVyw=" + }, + "Request": { "1": "QSQeT4yJHe3+wxuu7nGzdDBNHr50f1Ip4Cjf/9EP7gA=" }, + "RequestInfo": { "1": "5D+tU0qoSCdTw7FFk+AF3avu8NW/DqmpMW1ZC7vvjhM=" }, + "RequestInfoFilter": { + "1": "wZ8Jj3mfrh0HStaDC9TtqP4QeemCNI/eHRcvEZSJAng=" + }, + "SubmitRequest": { "1": "h9E55cpe7j2zy/PwEZ5xaLJkYj7xFINLqqVRa09w5nA=" }, + "ObjectMutation": { 
"1": "EwPhg9E0wYjsWzDYTMZSGkJWSkoiaGoo039K6dLoPkQ=" }, + "EnumMutation": { "1": "7/qPjBhnbmzXNV336VquEPUQVAR6gCVK+GfCHe+7sSo=" }, + "UserCodeStatusChange": { + "1": "/eWpYpn3Mc8rE9OvaXI+VDjYKCOurvii2DRnl0UTZsA=" + }, + "ProjectEvent": { "1": "WzGXlQZ+ol/VjFJQIKUBUGpMcfwQYmCyAIJQYAn7Mco=" }, + "ProjectEventAddObject": { + "1": "mERxpyorW4sE8pD7uC0ZZVBrmTmc5nyiVIyGMfclxkI=" + }, + "ProjectEventAddLink": { + "1": "10IEtfP2Czcmj7EwMzcudPsvJ98AI8O/CIPyU/LKcss=" + }, + "ProjectSubEvent": { + "1": "biKRZDDymRIhp1jkG++TDHFpP99fXTDt/EksSmIF1PA=" + }, + "ProjectThreadMessage": { + "1": "oy8Bi5hXE0kKFe1LS9nztTqfdKT1BteE2Twhv87kHrk=" + }, + "ProjectMessage": { "1": "DHi5Uu5OkShhuGhD+x3x7WLYqV0phkzMXi+BN/8/zAM=" }, + "ProjectRequestResponse": { + "1": "9lQqMZoPHZUgm64VeGOxb/vpAM+sqjr9xfl9NsvrIvo=" + }, + "ProjectRequest": { "1": "yaiksOZaNRotCvO9kueF6JJkJI8nDN3bsoJ0k3wVqzU=" }, + "AnswerProjectPoll": { + "1": "GHLwVGsp0Z6dKtNDmPzX3XMvOAo0O2Qkde1ZGK4hLEQ=" + }, + "ProjectPoll": { "1": "qhkA5vgUMx+vk0TB1JAO73HXpFaqLzvNgzeN4WhprBQ=" }, + "Project": { "1": "tKhBH5IMhBm5zhnWIVjVsYEpWDMaeFd2rarja9A0Zms=" }, + "ProjectSubmit": { "1": "2Wh5J0B3lpFAPREF4HOkYeytjF6JERwAmNGeAaCRgag=" }, + "QueueItem": { "1": "pil5vSG1vRPWIKCK6dtwLeR4EVhrAcsgZRDwtdpBwwo=" }, + "ZMQClientConfig": { + "1": "i4LNWTxwpNGlHCqk16BSqDwUZcus3V76WeE+IDbL9g8=" + }, + "Plan": { "1": "W/HvkLL9/FwwPdANAl8WOJ/Ib1x2UkdR0utu3T84byU=" } }, - "hash": "cywY0k3c9tIYHL3OnoiPEabwe2fyn6SvTxcG7zvwIPA=", + "hash": "NblGKIJFliKGb85F1K3f9sLThcZSAqVDp2RUlxyI9Jc=", "supported": true } } From 13691cb9212a28fe396b8ad30ad55aa116ed0792 Mon Sep 17 00:00:00 2001 From: Shubham Gupta Date: Wed, 20 Sep 2023 18:42:03 +0530 Subject: [PATCH 20/67] update notebook --- notebooks/Experimental/Data Migration.ipynb | 63 ++++++++++++++------- 1 file changed, 42 insertions(+), 21 deletions(-) diff --git a/notebooks/Experimental/Data Migration.ipynb b/notebooks/Experimental/Data Migration.ipynb index 5bf8e24ee11..c47b27c15a2 100644 --- 
a/notebooks/Experimental/Data Migration.ipynb +++ b/notebooks/Experimental/Data Migration.ipynb @@ -546,8 +546,8 @@ "ProtocolVersionMap\n", "{\n", " 1: {“User”: 1, …., “NodeMetadata”: 1..},\n", - " 2: {“User”: 1, …., “NodeMetadata”: 3..},\n", - " 3: {“User”: 2, …., “NodeMetadata”: 3..},\n", + " 2: {“User”: 1, …., “NodeMetadata”: [1, 2, 3]..},\n", + " 3: {“User”: [1, 2], …., “NodeMetadata”: [1, 2, 3]..},\n", " 4: {\"NodeMetdata\": 3},\n", "}\n", "\n", @@ -556,7 +556,7 @@ "Client\n", "{\n", " 1: {“User”: 1, …..“NodeMetadata”: 1..}, -> derived from the SyftMigrationRegistry\n", - " 2: {“User”: 1, …..“NodeMetadata”: 3..},\n", + " 2: {“User”: 1, …..“NodeMetadata”: [1, 2,3]..},\n", "}\n", "\n", "client protocol versions -> [1, 2]\n", @@ -565,6 +565,7 @@ "communication protocol: 2 -> Highest of the intersection of server and client\n", " \n", "\n", + "ProtocolVersionMap\n", "{\n", " 4: {\"NodeMetdata\": 3},\n", "}\n", @@ -630,32 +631,30 @@ "1: {\n", " \"hash\": \"effasfa\"\n", " \"object_versions\": {\"User\": 1, \"NodeMetadata\": [1]},\n", - " \"stale\": True\n", + " \"supported\": True\n", "},\n", "2: {\n", - " \"builds_on\": \n", " \"hash\": \"12dsad4\",\n", " \"object_versions\": {\"User\": 1, \"NodeMetadata\": [1, 2]},\n", - " \"stale\": True\n", + " \"supported\": True\n", "},\n", "3: {\n", - " \"builds_on\": \n", " \"hash\": \"5235sad4\",\n", " \"object_versions\": {\"User\": [1, 2], \"NodeMetadata\": [1,2,3]},\n", - " \"stale\": True\n", + " \"supported\": False\n", "},\n", "4: {\n", - " \"builds_on\": \n", " \"hash\": \"5235sad4\",\n", " \"object_versions\": {\"User\": [1], \"NodeMetadata\": [1,2,3]},\n", - " \"stale\": True\n", - "}\n", - "5: {\n", - " \"hash\": \"asd23144\",\n", - " \"object_versions\": {\"NodeMetadata\": [1, 2, 3]},\n", - " \"stale\": False\n", - " \n", + " \"supported\": False\n", "}\n", + "# 5: {\n", + "# \"hash\": \"asd23144\",\n", + "# \"object_versions\": {\"NodeMetadata\": [1, 2, 3]},\n", + "# \"stale\": False \n", + "# }\n", + "\n", + 
"\n", " \n", "client: [1, 2, 3, 4]\n", "server: [5]\n", @@ -694,7 +693,8 @@ " \"User\": {1: \"\", 2: \"\"}\n", " }\n", "# >> sy.reset_protocol()\n", - ">> sy.update_protocol()\n", + "# >> sy.update_protocol()\n", + "# >> sy.update_protocol()\n", "\n", "\n", "protocol_state.json\n", @@ -718,7 +718,7 @@ "4: {\n", "\n", " \"hash\": \"5235sad4\",\n", - " \"object_versions\": {\"User\": [1], \"NodeMetadata\": [1,2,3]},\n", + " \"object_versions\": {\"User\": [2], \"NodeMetadata\": [1,2,3]},\n", " \"stale\": False\n", "}\n", "5: {\n", @@ -759,7 +759,18 @@ "id": "fe2adf3f", "metadata": {}, "outputs": [], - "source": [] + "source": [ + "# \n", + "\n", + "class Weird {\n", + "name: str\n", + "data: TFTensor\n", + "}\n", + "class Weird {\n", + "name: str\n", + "data: bytes\n", + "}" + ] }, { "cell_type": "code", @@ -767,7 +778,12 @@ "id": "d5190d7d", "metadata": {}, "outputs": [], - "source": [] + "source": [ + "## Increasing versions\n", + "# hash comparision during start up\n", + "# pick the highest compatible protocol\n", + "# protocol.dev.json" + ] }, { "cell_type": "code", @@ -775,7 +791,12 @@ "id": "4ed6932c", "metadata": {}, "outputs": [], - "source": [] + "source": [ + "SyftMigrationState:\n", + " current_version\n", + " canonical_name\n", + " protocol_version" + ] } ], "metadata": { From 769d67db6ab03781fc7e00b0a0933779863c151e Mon Sep 17 00:00:00 2001 From: Shubham Gupta Date: Thu, 21 Sep 2023 11:27:33 +0530 Subject: [PATCH 21/67] import upgrade_protocol method to root --- packages/syft/src/syft/__init__.py | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/syft/src/syft/__init__.py b/packages/syft/src/syft/__init__.py index 062d6edb25a..b47e729e3a6 100644 --- a/packages/syft/src/syft/__init__.py +++ b/packages/syft/src/syft/__init__.py @@ -32,6 +32,7 @@ from .node.server import serve_node # noqa: F401 from .node.server import serve_node as bind_worker # noqa: F401 from .node.worker import Worker # noqa: F401 +from .protocol.data_protocol import 
upgrade_protocol # noqa: F401 from .serde import NOTHING # noqa: F401 from .serde.deserialize import _deserialize as deserialize # noqa: F401 from .serde.serializable import serializable # noqa: F401 From 3d63c93b42d6b0be76485f9d2d43016c11d750e4 Mon Sep 17 00:00:00 2001 From: khoaguin Date: Thu, 21 Sep 2023 13:17:16 +0700 Subject: [PATCH 22/67] add some doc strings and type annotations --- packages/syft/src/syft/types/syft_object.py | 24 +++++++++++++++++++-- 1 file changed, 22 insertions(+), 2 deletions(-) diff --git a/packages/syft/src/syft/types/syft_object.py b/packages/syft/src/syft/types/syft_object.py index ca817979be2..69c02593308 100644 --- a/packages/syft/src/syft/types/syft_object.py +++ b/packages/syft/src/syft/types/syft_object.py @@ -183,6 +183,17 @@ class SyftMigrationRegistry: __migration_transform_registry__: Dict[str, Dict[str, Callable]] = {} def __init_subclass__(cls, **kwargs: Any) -> None: + """ + Populate the `__migration_version_registry__` dictionary with format + __migration_version_registry__ = { + "canonical_name": {version_number: "klass_name"} + } + For example + __migration_version_registry__ = { + 'APIEndpoint': {1: 'syft.client.api.APIEndpoint'}, + 'Action': {1: 'syft.service.action.action_object.Action'}, + } + """ super().__init_subclass__(**kwargs) klass = type(cls) if not isinstance(cls, type) else cls @@ -204,8 +215,8 @@ def __init_subclass__(cls, **kwargs: Any) -> None: } @classmethod - def get_versions(cls, canonical_name: str) -> List: - available_versions = cls.__migration_version_registry__.get( + def get_versions(cls, canonical_name: str) -> List[int]: + available_versions: Dict = cls.__migration_version_registry__.get( canonical_name, {}, ) @@ -215,6 +226,15 @@ def get_versions(cls, canonical_name: str) -> List: def register_transform( cls, klass_type_str: str, version_from: int, version_to: int, method: Callable ) -> None: + """ + Populate the __migration_transform_registry__ dictionary with format + 
__migration_version_registry__ = { + "canonical_name": {"version_from x version_to": } + } + For example + {'NodeMetadata': {'1x2': , + '2x1': }} + """ if klass_type_str not in cls.__migration_version_registry__: raise Exception(f"{klass_type_str} is not yet registered.") From f095a15c403e0a87be682d6758a53fc3fbb6823e Mon Sep 17 00:00:00 2001 From: Shubham Gupta Date: Thu, 21 Sep 2023 13:49:14 +0530 Subject: [PATCH 23/67] - fix hashing in protocol upgrade - fix casting of protocol version --- .../syft/src/syft/protocol/data_protocol.py | 35 +- .../src/syft/protocol/protocol_state_dev.json | 362 ++++++++++++------ 2 files changed, 255 insertions(+), 142 deletions(-) diff --git a/packages/syft/src/syft/protocol/data_protocol.py b/packages/syft/src/syft/protocol/data_protocol.py index b966b5d8817..acaa5348364 100644 --- a/packages/syft/src/syft/protocol/data_protocol.py +++ b/packages/syft/src/syft/protocol/data_protocol.py @@ -1,11 +1,10 @@ # stdlib -import base64 import hashlib import json import os from pathlib import Path -from typing import Any from typing import Dict +from typing import Hashable from typing import Type # relative @@ -32,22 +31,8 @@ def data_protocol_dir(): return os.path.abspath(str(Path(__file__).parent)) -def make_hash_sha256(obj_to_hash: Any) -> str: - def make_hashable(obj): - if isinstance(obj, (tuple, list)): - return tuple(make_hashable(e) for e in obj) - - if isinstance(obj, dict): - return tuple(sorted((k, make_hashable(v)) for k, v in obj.items())) - - if isinstance(obj, (set, frozenset)): - return tuple(sorted(make_hashable(e) for e in obj)) - - return obj - - hasher = hashlib.sha256() - hasher.update(repr(make_hashable(obj_to_hash)).encode()) - return base64.b64encode(hasher.digest()).decode() +def make_hash_sha256(hashable: Hashable) -> str: + return hashlib.sha256(bytes(hash(hashable))).hexdigest() class DataProtocol: @@ -57,14 +42,18 @@ def __init__(self, filename: str) -> None: @staticmethod def _calculate_object_hash(klass: 
Type[SyftBaseObject]) -> str: + field_data = { + field_name: hash(model_field.annotation) + for field_name, model_field in klass.__fields__.items() + } obj_meta_info = { "canonical_name": klass.__canonical_name__, "version": klass.__version__, "unique_keys": getattr(klass, "__attr_unique__", []), - "field_data": klass.__fields__, + "field_data": field_data, } - return make_hash_sha256(obj_meta_info) + return hashlib.sha256(json.dumps(obj_meta_info).encode()).hexdigest() def calc_latest_object_versions(self): object_latest_version_map = {} @@ -154,7 +143,9 @@ def state_defined(self): def upgrade(self): object_to_version_map = self.calc_latest_object_versions() - new_protocol_hash = make_hash_sha256(object_to_version_map) + new_protocol_hash = hashlib.sha256( + json.dumps(object_to_version_map).encode() + ).hexdigest() if not self.state_defined: new_protocol_version = 1 @@ -165,7 +156,7 @@ def upgrade(self): reverse=True, )[0] - new_protocol_version = current_protocol_version + 1 + new_protocol_version = int(current_protocol_version) + 1 current_protocol_state = self.state[current_protocol_version] if current_protocol_state["hash"] == new_protocol_hash: diff --git a/packages/syft/src/syft/protocol/protocol_state_dev.json b/packages/syft/src/syft/protocol/protocol_state_dev.json index 9900aae042d..fbfb208c3bf 100644 --- a/packages/syft/src/syft/protocol/protocol_state_dev.json +++ b/packages/syft/src/syft/protocol/protocol_state_dev.json @@ -1,235 +1,357 @@ { "1": { "object_versions": { - "SyftObject": { "1": "YKNSMz5Mv3JP+agzpfG7//NdOC921hw08vgXOKTZhuQ=" }, + "SyftObject": { + "1": "3e4451972033459d3292997b6645b4762214d554ed290e4d629542dcf5a6e90d" + }, "PartialSyftObject": { - "1": "Y4P+AvGCmidZWfH5E3h6mAZybWgzR8Qo8yH/YKE6tBk=" + "1": "2bc6d28d8ae433410b12733b7a7a3d0943933aa7ca529a2c96bfb82370490214" }, "NodeServiceContext": { - "1": "4jBsFFBIU1k9b6Wg9Ts3QSGpaWcxuTBdoB08HIRziJE=" + "1": "457c30b8d6cca2db97a839b6d9e414c3d810d8c0100fa83fecaeac3c4cc760ae" }, 
"AuthedServiceContext": { - "1": "ROZLMgphlmwrzdaqVp2FQy9VMlMUQoTwYG/I8cu6zYc=" + "1": "f4a5acf9f2efd050b6ac218f4741fae7b6537cb79bf56be06ef03a272ca26bd5" }, "UnauthedServiceContext": { - "1": "nTnxoAksWxKO1hJ2VkAaDEhO/5S5wmtQ6MqkAnFhVqg=" + "1": "3d23424d2e55cb982d1904531f731d793d7cb5e2beebec67014cfcb9d5cb5c92" }, "NodeMetadataUpdate": { - "1": "0uJFncAR+d7xTMmqUa5liNFwG9vuVVFBvmMYCmf520c=" + "1": "47448bdb770590b51883b38f3e1037a433c89d09ace0b5ff0aca59837cde319c" }, "NodeMetadata": { - "1": "2jeTbKyEk9d9/aJGbDa7XhO2Va+AwvxXYRUhWmvhCa0=", - "2": "9j+mjonkZIHWI8QKqk8cze7fEYPYT2thN0e8kEsTN4w=" + "1": "99298e7f56a7f1dc654efbf7eb5e27e763b77b9ac82f16effd4f0a513864cb09", + "2": "a2aca5be420371005a87051f5d25d9fff574484525efc0c5403dff6de210b812" + }, + "LinkedObject": { + "1": "32ed44724ac6ac63f07e89a900e3ea7f0df55b99cec649d2d7b0b629d82bbee5" + }, + "NodeConnection": { + "1": "8b053d85cf16fb1acb9681cf9036e521530ff459af84351c45d0f9344fc588c1" + }, + "APIEndpoint": { + "1": "241d24a9b4116d25990edc5bb51351bb8657e61afe8b1f2ce93e09ba246adc66" }, - "LinkedObject": { "1": "xumvQkRL81oCIw7pCoImBypTc6NmGgAqYf5rfNC+JTk=" }, - "NodeConnection": { "1": "YdSexoenQRB80O6ISJaj+KLnrZjV6yFtMa2WnGQZK/s=" }, - "APIEndpoint": { "1": "GgbLF9vHJqkCQUwuzBUloG0uQsT6PWS3qgBk40VkC88=" }, "SignedSyftAPICall": { - "1": "0n2Jmh7dLyoh81FfxK0gIKkp+4Amu4Hrb5qi5XcPD+E=" - }, - "SyftAPICall": { "1": "mBNhjZ9Smqkzk85RfCzt4/LAFLYn+AnL9OwFuqZRwvc=" }, - "SyftAPI": { "1": "kqlElFZum0tnAimmNIBPJQH/XdY4FQX3zOTrhfLrLu4=" }, - "User": { "1": "J+VKXh3/Vr+VrTKFqvyLNBS+ZstjMUKp+HZ/1Xj8ZE4=" }, - "UserUpdate": { "1": "+i3TS7vvVcH8qv9pk6693z+I7UuHBzw+2aHHoN/nMeg=" }, - "UserCreate": { "1": "Qb/uv7aNiSV8JCc/4kpu/TUrINoi/VyHItZR0qtj018=" }, - "UserSearch": { "1": "p3vPAxBqKkJHRrxwjbN1w3pVDydRnpGgC01SARK1xFE=" }, - "UserView": { "1": "jiwWOsYbk4lT3ojZHhWSpPNy+AC89Am18QWHmgB8+lE=" }, - "UserViewPage": { "1": "sTFHQd1NgnBqHo+v9y8Y4JFEYEOd/E9coyrJh8/CEQY=" }, - "UserPrivateKey": { "1": 
"go5Yai/AhgXdpujV3Rpp4ktWbqPPZNw5dnfpdp4dbzs=" }, + "1": "3ea546d2e55d7f3f26c7590240588304b07c342de750b8fa55da0ebae19f5439" + }, + "SyftAPICall": { + "1": "041851168f212a0faf691862c11b805238047d07c3e44f31f79e0f31e7b0679e" + }, + "SyftAPI": { + "1": "946d442e4d0b5bf31f8c0de6d8e022693d41c7a440f6c8429ef40758d404513e" + }, + "User": { + "1": "1aa690bb2163536a2aebb6de73676e1b504bec744ab82a8dfabdad2ebd184d5f" + }, + "UserUpdate": { + "1": "7ec9f3d5103ea8ed08268f17f095e2172d0e9e3bb696a7266f86831f97f00bd0" + }, + "UserCreate": { + "1": "152991d21a29e7014001d77be45aa2a83ffb855464478eeb9e99712b9b9c1d3e" + }, + "UserSearch": { + "1": "ef1833ade6c67812702ac9ddf6d6ac61192cf962d6ec0232e8d593add7273d96" + }, + "UserView": { + "1": "6bc4efdad3a28e009a12f3a865cbc11c77b187fdc4c8174e28d735e28d1a9af0" + }, + "UserViewPage": { + "1": "78feb1d73aec6db844f2716d590701e082f021f48ba5852bb86781f77c0bdaeb" + }, + "UserPrivateKey": { + "1": "29547db4f2f1c3d10f35ce04adc7e4ca00f6e9041a6fce922da97ab5c43f1a18" + }, "NodeSettingsUpdate": { - "1": "r8UPbVHmnELUYMLpHITOFQUfGBscyN8eW2zQUew/heU=" + "1": "3adf1ca892f50565c57503843ffd846093a74a0c09c40317d022e3cf2075cc7b" + }, + "NodeSettings": { + "1": "74a99ed759776b9625f3b0db2b15fcae62d07712af6c666cc533278e6eb5fe6f" + }, + "HTTPConnection": { + "1": "19d5ea982ea36531feb3ce932661a48f76268d45fe66a872c9744a5928f352e8" }, - "NodeSettings": { "1": "PG1lzAKRE9xcqun3oA/UQijbmP3Ky4y+W/9usVlsKH0=" }, - "HTTPConnection": { "1": "g0IjjFIhpLSaLQRGAmsKnsf6WnCQGBkn2I7RTWSCMhE=" }, "PythonConnection": { - "1": "zp0njyhyHxPyRGIf8NHInpAxBl0heefw99lja1HktIQ=" + "1": "16b19771638bdd4810da5c9cba81c63a738840483c9219c077d280e201972f7b" + }, + "DateTime": { + "1": "2de23e90acf8db16b89d64dbeefedbf14dcd8829fd0d969bd572f8211271ef5e" + }, + "BlobFile": { + "1": "b3d133f06c4817e4bb881322881f07c4567e931d194d2a116fbce3c191431fa7" }, - "DateTime": { "1": "lgw5W+3uvTF7+Yf90TCm/ZH14sdQok//Fg1od3MEYHo=" }, - "BlobFile": { "1": "eMGHm8RwoxIYgApFjBQbIk10OHZzzg8iYueZuTHW3dA=" }, 
"SecureFilePathLocation": { - "1": "RQ/ecdme/+BlMKeDzP13I5COZWE6Y9Z/Qa069pAjhB4=" + "1": "2827f67e013eac3ccfd1a54300091d385765f144ad375c5d5b3eaf3679825c7b" }, "SeaweedSecureFilePathLocation": { - "1": "eUPv14r+G8ahuTxPyCRwGJohZszoVcIwTQwP9Ka1erQ=" + "1": "b2e3cf9fe876e61979323203b5fd880330662b30910f0077b667fdc7cb0a441a" }, "BlobStorageEntry": { - "1": "P5zOHxfAah/oDSI30EmwitHqr0AH6SFwblIqP12WwWI=" + "1": "f20a0e15c251f3822ba35f13125ba96c895d1e55eeb7b8191ff98b84ac70ee31" }, "BlobStorageMetadata": { - "1": "tRxxFD0aFQZIrFSwOvnKfgfi86XPe/jGbxIoERM3ORY=" + "1": "bf2f0769aa0a18b5d28d73c676288a15507e8c00b0325a80b38870a35337a17a" }, "CreateBlobStorageEntry": { - "1": "rS/eIit7drIV90BwN+1+C7g6d84iuJWVTcnZlk4nxf8=" + "1": "f0a6ddd17072e8e69f6d65f851d3f029052f7ebb170432769d46fab25498a662" + }, + "BlobRetrieval": { + "1": "a6c9369a6a8208ceccb71ca29a3391546aab3040c2088860b862d8d95f4f4acb" }, - "BlobRetrieval": { "1": "CHBUBC0ui9KBmycpBb/ajpPjxx+4Tk996wp/AhL19hM=" }, "SyftObjectRetrieval": { - "1": "oXaUpeMn3ZDdsCCA36TPvA0Gl1PhwYzLl83EqF8bnqw=" + "1": "238c5fbe27427ab9214370954053b3d0198c97497ba5c8b990782abb9f941615" }, "BlobRetrievalByURL": { - "1": "Tulc9im06HsDR5nk7IQJA9dOjfMzrguhmccfINfY3Ew=" + "1": "8d7d5d4bd624e33bf214dd8cc9b453edda1e586288b990a4abbd3ce58de10485" + }, + "BlobDeposit": { + "1": "d5fc7d669be5847ad5847c9e89d06ec37b65d31e29063437fb3ce187d05517bc" + }, + "WorkerSettings": { + "1": "312f82db87d952c5b7815757d9b86fd4b348fb32d25cab2d32169b97b9a0c145" + }, + "HTTPNodeRoute": { + "1": "3c9790fcc8bd19a6e439144f2bf651a3e6a1835663e592800c8df19a591489f0" }, - "BlobDeposit": { "1": "hzeQOsl5CLNJP3KqqVYjOtEmHIJEr3aOvC2a0OmasRA=" }, - "WorkerSettings": { "1": "lO0IIgJ6xd6dIoQ9NNOhzN1q1jCdJTmF1TPQyklsSt4=" }, - "HTTPNodeRoute": { "1": "VvXpjwKl7gnJ/Fp0k+Mpx9V9fzc4NVg3/jOh4+RF3PA=" }, "PythonNodeRoute": { - "1": "e6ootD2xmyLa4gAlvUKMhi1s3Ga9ABr3aJm23zuxY3g=" + "1": "5f12a1198e41028c15ab6109082938e26f088f104ac4cc70b4b70f14b43bae69" }, "EnclaveMetadata": { - "1": 
"K6RyqYOV77ZPwi954HfrBdVBIuy8KoSKfr6U46CYa7A=" + "1": "a62e0bc349049c704bd3bf54dd5b4c57996ce965d23b4e70a52e83f67efe0417" + }, + "DataSubject": { + "1": "24156a8cd0c672409bad05645f6507549d26c068922fbad3f9e90b0404b81b17" }, - "DataSubject": { "1": "vb0kWLbURoD1CYVw58ZaltGfnAIlIXp7K7tj5gsVvyQ=" }, "DataSubjectCreate": { - "1": "FZv2MaKkyJ9EZyzlx9ZP3+8MPFDE+G5aigv5ZMSXQks=" + "1": "75055403f67e45663cb9a34d3b9038a6d377b06a2aad0695f92e878dbc119694" }, "DataSubjectMemberRelationship": { - "1": "VgQw34XsTIAqvfAv/P7OLwbl5dfz+8AookotabP8Yj0=" + "1": "dec77a2cb0aa2b126e86e95f5e8ca9b37263a9ab5084d208021c3afe6a58020e" + }, + "Contributor": { + "1": "b05268de04c54886b884b89a55d284fb26358e3f40c79d9b117780cafe5881be" }, - "Contributor": { "1": "B51N99UvLVA37uLKi7S+otHIvdtNpAc1nYHmwT+m/8I=" }, "MarkdownDescription": { - "1": "Ey+ZtO5lR9DuiCm1Lzv6xnoy79shk5JclAY7HE+m1/w=" + "1": "abd4fdf31bd925fd899b665ef97f6710d4223dcfe49dda42f351d165581fb9e9" + }, + "Asset": { + "1": "a09f6f43b999c2ecf714f7b10a6a58bb2fe76fd4bdab43b1d17d7c2d93213b44" + }, + "CreateAsset": { + "1": "832f8a9bd62bf7938afa987a250dc635eb9d48ef874183e1b2ed49ece4888a9d" + }, + "Dataset": { + "1": "20b396058fbee41b3d3366003532d8afa2eeedfe5913d42b90f29d49d9eef4fd" }, - "Asset": { "1": "86T/V6DCjviie/Z7r0KrN2TKOTnY4cWeM458BlFU6/w=" }, - "CreateAsset": { "1": "LE0rScoj6BrnJspw4MFIi/GgHRwY4dp8O3rWfdBP+Vo=" }, - "Dataset": { "1": "Is1rUilFO3jnSEr2x8iYBm7b7tAbBN8Z8lbi43MECmA=" }, "DatasetPageView": { - "1": "CuPRePPl6YiUGG10xv7+05QOqw5C5t8pgg2sPl3bwSk=" + "1": "813732868a2749802b963a7df453517be70e8772c8514f4b2de6173a44cd044b" + }, + "CreateDataset": { + "1": "57fce597c56437396581234b80a8ad7ffe5cd59d92c7ea0febe907dd44233de5" }, - "CreateDataset": { "1": "Y4LSVY63dtpFuOh9DXlCyb9GsHQSAnuqd2Z+49rRbNw=" }, "ActionDataEmpty": { - "1": "bgFSnXAP5Y5MDp2m12KmAYz3BYizQbQM6bt8+SV0nyo=" + "1": "0954c410666ea48683c62879f3e20fedd85ed08ea8ad3bcf8e24e9e158e04e89" + }, + "ActionFileData": { + "1": 
"bdd2929f309babbc6ea32110852aa6af9a463840dcc5128a4127df06a5568221" + }, + "Action": { + "1": "e4003f1c14ad472de8eac34f16c4264791f1d3202b89cf6317ba8adb3c10ebe8" + }, + "ActionObject": { + "1": "c6b428043d2e3dcd5fa3c3a5dec06fb20b114f1f24c46a9230dc5cd4d66eeda4" }, - "ActionFileData": { "1": "pz2pLlCxo+YOKV/IvUGj9OE8iq7ioM+jh01yjxkoA7k=" }, - "Action": { "1": "zDzMNUSHGZl4vb83VNC2rWbJDZr3Bv+4IDH4TZ0A/Fc=" }, - "ActionObject": { "1": "LtfQw1cBX2hw/jYCKi9KqbBjTHLAqQPSWlnKEEBfCio=" }, "AnyActionObject": { - "1": "fzK4maUmKiy4S2rV0vZ7VqALDXfprO8QauGZF2oOWLY=" - }, - "TwinObject": { "1": "46fPSjqUeIm1X0AEw1s/t6do9ZiZfNblXExnMP6PdYA=" }, - "Policy": { "1": "Fm9nQqoJ5a8mDsClFHjRtxUEEZfKnukmyKOwk8GGiOo=" }, - "InputPolicy": { "1": "xGLFa6azUOLtDZhhHtg0AEQKZodjDqMmkFSPXGyioHk=" }, - "ExactMatch": { "1": "Ryz1Z/oaFkpVghaVhWITouMQc5SaBnV8vdy/biMzcM0=" }, - "OutputHistory": { "1": "Aooo3q0QwZC9ITD5uPfryjzXN7cDqHaI3CpuYbP6qKc=" }, - "OutputPolicy": { "1": "EVnZO8ANUo9xD8Ms00rK17cQlbPS4wetsLUdNVHNW1Y=" }, + "1": "8c7da79f28694e27d56e1591f25a6e099dc5f747afe56aa0f04d09880de4d322" + }, + "TwinObject": { + "1": "d10bf03fe7f86687ebcf16aec661c0f82dc61b6ba15b3e977b73e37d4e2cca7a" + }, + "Policy": { + "1": "f08f964a0f81e13fcf3bd52be172c42b447f52343cd73613b62d5bb42fc2ca6e" + }, + "InputPolicy": { + "1": "2f86e04f44a90ef19041b105202e90b8b5e15a5dd30f247272cb1a7a8e9abd30" + }, + "ExactMatch": { + "1": "280bf2403596feeb9562ce04c10e39ae13a79e6fde9b342e74e998baa9e3931e" + }, + "OutputHistory": { + "1": "0f0d905c7653e11ac2de4033bfc7ae62b35ca1f018156fdad04229976da22c86" + }, + "OutputPolicy": { + "1": "6bf9910e98a2d9779a7cc8514f59a328a8a0ab802f82db438dc1fe4c2072d885" + }, "OutputPolicyExecuteCount": { - "1": "tv9+lELPKGymvNsIeuqYcLfh4dcqnOzJt6cp9V5HZUo=" + "1": "cd61f8960e96e60b5faa047e4775edfb551581c34455fb5a014bf6a55eaa6719" }, "OutputPolicyExecuteOnce": { - "1": "42Ag72zfZGUtRhD69GHTt3Ao17ONWAZv8GUtTYFVamk=" + "1": "1325e7878ee6d724046cb671ffda37c23babc8c47815148624c3b04dec0bb47c" }, 
"UserOutputPolicy": { - "1": "ez36vPaSC0DN+cRGXyI/1IycwNhfWr4MI6WsPYh/+1M=" + "1": "5d22fb45a21d75227ea5c755c88d7b1b87bc6305b2a2b15ff42ac4500e08100d" }, "UserInputPolicy": { - "1": "RGI6BmZqb0HjouCRLVoP0oTy+9CjfBuLiV3RmC4eyRk=" + "1": "ce16b03b7bbaa2b680185d7e35ec1555381e10e393586d9488827ce6c53abd7b" + }, + "UserPolicy": { + "1": "0a3488f0e5d24fb230b78e41f897310b3eaadbafc39358936b820d92908a06ae" }, - "UserPolicy": { "1": "MSC0EODy/AABI6A4KoRYAI8wll1lCPLIrspm/vgLbx8=" }, "SubmitUserPolicy": { - "1": "OvMf5QQ54LApN/3OTihJ8sXXVt1Ob8F5I54Agpy6E0I=" + "1": "5d4385336238a0b095a9225efbd7475fc697f8f4e28b6978ffa286e5b5e73e85" + }, + "UserCode": { + "1": "5fa54bff122cfd5e481042dd5beb299c2b786ee746309f1a44caf58a5cda498a" + }, + "SubmitUserCode": { + "1": "c90de22a6b765771caed7ab5b4b88fdf0b13886edb3f7e23bbbd0a46a9ab7899" }, - "UserCode": { "1": "zflEdjmNBSo313bbafO72GwSi8oef6K1FeTmUvfhDFQ=" }, - "SubmitUserCode": { "1": "ffMpFbS2ORw8DVKaQAo2Qgax38XeW0IcNSa0KRq6IUg=" }, "UserCodeExecutionResult": { - "1": "/lOU4pxbpyrCchNiXWbpUfznhAZdUFDvrn28aydxwEs=" + "1": "e9d74d5c21fd91152441c892e671a080c8580e96bf352bcc8ceec6c22a638bde" + }, + "CodeHistory": { + "1": "0d27ce18a6ea2818b4f0f7ecabde63b9a322af3695dcecbb6185fe5415a30334" }, - "CodeHistory": { "1": "7I4gbWRYN3FY5uXB9oigPE/JIvn+ae2e4W105F48QEo=" }, "CodeHistoryView": { - "1": "SrcXTEpiVftCmmK4FdNxrAXvT7GHDC0RoitwrJ2vUXQ=" + "1": "f2ba91956dcac1e9244ee00b046dd604c91ac94c93119b1fe18a5f9f7411ebf2" }, "CodeHistoriesDict": { - "1": "UrL8zC+enllTYs+i/yfkjA1jhIRKYDI2CnBPAvlb228=" + "1": "5b23fe4675b6c2164a73500ed1e6a218ae82f89fffced6dfac4eafe3ac56f92f" }, "UsersCodeHistoriesDict": { - "1": "3wfodj9HAWVhzHtxWBWVcFSA+bX7XO8Uqld3Is/RwtY=" + "1": "acc66334b1789a3acfccef3d398f5474994de7e04269b4db3262b377444a55e1" + }, + "NodePeer": { + "1": "169d3a3028bddb140815fb8be3be0a63274ebede08b61ee21a707d200a5df456" + }, + "ProxyClient": { + "1": "281e5a54fbf581c83e46a80dfe6f77e313e3dbaec95ad6c24e3c4d4b7e899961" + }, + "CommandReport": { + "1": 
"ad641d7a7691ff4e223bd8968d04d7329dca55b51cd440944aa0a155ed585d70" + }, + "CommandResult": { + "1": "d673f70c20b176b9ab13ce64648cadc910fc5bfab610074480c26e2c31032197" }, - "NodePeer": { "1": "v79RAuN61iLzNFaTxKhRSrMq3AMT6GzOBTBHxjc5er8=" }, - "ProxyClient": { "1": "PpgG6U72PgZ6zOzvJG4/hGzXYZZPsZVb9lQbDhdgM3A=" }, - "CommandReport": { "1": "3IokYf9jUF8uFIsUyPdCg2GoypLzCU0Im1fSPysVM10=" }, - "CommandResult": { "1": "e3l3u85Fe3Dnu1kPpTd38VjrGDCZN47IFo5iDStnJv0=" }, "VPNClientConnection": { - "1": "HWV84pQiRBpeb+lcvIxWn+5D2ySto4bNOGX21WqfJTg=" + "1": "d7f59725a88a4b76d941139eff5dc6d14d13e0b13ccbcc66cc9b713661769d01" }, "HeadscaleAuthToken": { - "1": "x+6INRwt6luuqI2d1NYyMQMMXjBovQ9+bkZiffFbNEQ=" + "1": "6a3590bcd0a110fa485bb4178d630f177d9c534af856fad03f746156e239eb72" + }, + "TailscalePeer": { + "1": "80206abb427762765dc3b18cf2e47f557594cbfb284adef40ac1eab65341db63" }, - "TailscalePeer": { "1": "vCubjOmd0lpfC9CT4+MV1DGAg2nDzUZ29zAaawr6p9A=" }, "TailscaleStatus": { - "1": "br3ZMac7cI4n7pxD5S7lOpZ/2uoW8rFi2MmpDbgcGAU=" + "1": "f36cfab93f50d14ed3493c7bdd1833d642b80b38bb861e33d3ba0b8bccc30eba" }, "OnDiskBlobDeposit": { - "1": "jjJZl1vrOaO2CzBzP20s4D/q0Qe8PumsiyVa/FL2pqE=" + "1": "db940a4fd43f2b489875d27eab73193f0d69cc5eef7a754e99f83a4616ed5c35" }, "SeaweedFSBlobDeposit": { - "1": "4bRLAUv2PMXdGnt/7yPp9MuZCMGKhq6qQ8MZbOkHH2I=" + "1": "8372147e6eb56adc347a6c6c70df40cca65a6ec0a4e2c7858362375da82d8ed0" }, "NumpyArrayObject": { - "1": "3zbjWk0zX9PxbURLX3ShoVgCQOI81bfjYhIdaZ7HuMU=" + "1": "8e62c50b0f2dda5a2348fb0712136bcba753bb4392a6eb5952f3bcad2877a4d2" }, "NumpyScalarObject": { - "1": "IYG6Da3/CLgz0gerP9Ci2yJrjWONdPhNrFf8wOeZ/uA=" + "1": "05e4b92e020a5064ef376b5c8aa08883444fcd051d93403980ed6bf3a73201d9" }, "NumpyBoolObject": { - "1": "YIJ2Xmk7JnkGHAynY6HYCXOoKHJ9EYq7CZjNa3YdOWw=" + "1": "c83d6f2cf836e320cc338487f1921bc6abb27b2a6b6c0bbcad42f738bab39b6b" }, "PandasDataframeObject": { - "1": "uX4KTvRFIAlVwHfo3ZN34KSxf4MknVT8PTwQeMQJWrU=" + "1": 
"2cea2e20683ff1abe355c15fcbb02722694bf9cd170bb13b69fdd0fb772de61c" }, "PandasSeriesObject": { - "1": "/fq5bJw6oiyJHgDMx8TBCSA2mZZBfBWcVbSRJfA/cmA=" + "1": "77d074c7f8a6e46c34ac7ec21aa7836928dc6492ca3e7df30a250bf7d47a4de2" }, "ReplyNotification": { - "1": "Efh1C41x/1EtwQZFRMa6UpvxNZtykQjnqNNGenpmDUE=" + "1": "3695ca24420445654f4c2e62bac51f8a3f5899c45446b5a37e8c46625d51c8ea" + }, + "Notification": { + "1": "69f470aa9f94651c237ccc6c2fb4e9aad7fa03196bcc7a21c3662877c43760ff" }, - "Notification": { "1": "wUCtMuph8YS2oGIgPUi5v+d1KDW8YZA+/E1sV01Lyd8=" }, "CreateNotification": { - "1": "ROov4hDVjA+Z95CXBmiLbQxyVqmEClDP3UqOfw6PmzY=" + "1": "b0b9109841ae8c54bee62851ea4ff4e75aeaae0550ded16d6f71fab2f28118ba" + }, + "Change": { + "1": "a90eefd804d636624bcb9503dd60c2f73e91a7afb3a5d569a6a5f099dcf49434" + }, + "ChangeStatus": { + "1": "bb5ce990ef34f2494abd3e2c82f2871cccc813c08af837ad929f05be9477071d" }, - "Change": { "1": "NZWmhd9R8EefaKi+TTYYyLmBrOm3O1Yct9KHGliFDYo=" }, - "ChangeStatus": { "1": "AfAOdUeFuiQ+aDGNchAJdndMMDCKJZPm3SQo0jxMYAE=" }, "ActionStoreChange": { - "1": "5OP7I4PrGl14pve4o2XZHNbYIcukfZnm1wQ1uLKZVyw=" + "1": "62bb0420bb5fcdad6d2cd2c628c059ace9030bfea3864f9a534cfa7ad8f7d7f8" + }, + "Request": { + "1": "769a605653d6ddde60f0ff058dadfd6e136cf8f1f25e0b16f491b23fc85a8bf5" + }, + "RequestInfo": { + "1": "c142149b3880adde9c48500acce2ddfa32f97e0917bbbd44834abcd7e054e018" }, - "Request": { "1": "QSQeT4yJHe3+wxuu7nGzdDBNHr50f1Ip4Cjf/9EP7gA=" }, - "RequestInfo": { "1": "5D+tU0qoSCdTw7FFk+AF3avu8NW/DqmpMW1ZC7vvjhM=" }, "RequestInfoFilter": { - "1": "wZ8Jj3mfrh0HStaDC9TtqP4QeemCNI/eHRcvEZSJAng=" + "1": "1d91467abbca92b6bca664d697369e0d26187e338e0e96a9d765b2a1f616ed23" + }, + "SubmitRequest": { + "1": "3345ebf3a0ace3678eb6929294606143e3d1dec3da76843148b2ae773d86c471" + }, + "ObjectMutation": { + "1": "29fd705dd5c435b19191bc662ca1ec98726e446c173eb779f11af03501e98296" + }, + "EnumMutation": { + "1": "792a9c1b533e02277db1650079d342cdcd34efc723462409e724a68c0cab6143" }, - 
"SubmitRequest": { "1": "h9E55cpe7j2zy/PwEZ5xaLJkYj7xFINLqqVRa09w5nA=" }, - "ObjectMutation": { "1": "EwPhg9E0wYjsWzDYTMZSGkJWSkoiaGoo039K6dLoPkQ=" }, - "EnumMutation": { "1": "7/qPjBhnbmzXNV336VquEPUQVAR6gCVK+GfCHe+7sSo=" }, "UserCodeStatusChange": { - "1": "/eWpYpn3Mc8rE9OvaXI+VDjYKCOurvii2DRnl0UTZsA=" + "1": "c87a132da945ea099bd8f917081dbc1030d1c37c46f122112d37a65d0d357a00" + }, + "ProjectEvent": { + "1": "a3456153fec47f5aa62b6ee7ec8b4a620dea40ab91c05f8c5fd3a6c9b6921ed7" }, - "ProjectEvent": { "1": "WzGXlQZ+ol/VjFJQIKUBUGpMcfwQYmCyAIJQYAn7Mco=" }, "ProjectEventAddObject": { - "1": "mERxpyorW4sE8pD7uC0ZZVBrmTmc5nyiVIyGMfclxkI=" + "1": "a7f946535a6bf3421901457718276380eaad1bfb4f2ef0b0a4b0fbb0e6dc13f8" }, "ProjectEventAddLink": { - "1": "10IEtfP2Czcmj7EwMzcudPsvJ98AI8O/CIPyU/LKcss=" + "1": "3bb01deaca94356a1b18d2fe7e11bca7cdcf0c886dcea3afe24d10127f08bc86" }, "ProjectSubEvent": { - "1": "biKRZDDymRIhp1jkG++TDHFpP99fXTDt/EksSmIF1PA=" + "1": "dec533c0aa5dcd15dbffec58adfcb787580bc12bf918f300fa89bcd38934df88" }, "ProjectThreadMessage": { - "1": "oy8Bi5hXE0kKFe1LS9nztTqfdKT1BteE2Twhv87kHrk=" + "1": "4743c1dde5fb758ea2a204913dc94cc8185d3ef800aa9ac3a4406ca9599d794f" + }, + "ProjectMessage": { + "1": "d5896bea45ed2a56e7eea718fc5b2ef82719e301ca8fb01ba6d7eb44b3b9659c" }, - "ProjectMessage": { "1": "DHi5Uu5OkShhuGhD+x3x7WLYqV0phkzMXi+BN/8/zAM=" }, "ProjectRequestResponse": { - "1": "9lQqMZoPHZUgm64VeGOxb/vpAM+sqjr9xfl9NsvrIvo=" + "1": "e086f9034fde676907bf6d3bb65f3ed08eed78f5fb099aa5741114b4c41b0b1f" + }, + "ProjectRequest": { + "1": "9e3d9351857e9a95fe15ed6b562db2c96c59636a177d60a7ec44606a9263457a" }, - "ProjectRequest": { "1": "yaiksOZaNRotCvO9kueF6JJkJI8nDN3bsoJ0k3wVqzU=" }, "AnswerProjectPoll": { - "1": "GHLwVGsp0Z6dKtNDmPzX3XMvOAo0O2Qkde1ZGK4hLEQ=" + "1": "7732c099f8ce9813124ef543836cb4993411f1335d92c8b90502563eebdf95e0" + }, + "ProjectPoll": { + "1": "0747263d1c9b47db16aa3a95a5078e7d92e067ad1442b618a27f9c031e0a8470" + }, + "Project": { + "1": 
"57a7a11cf1afbec71dbdc6e19743a71370c96cfe0e295613bd5454acdd81355c" + }, + "ProjectSubmit": { + "1": "dfe6df16db7d814cf04f4e93b763a8fed050ac157d838f14123608f23338d597" + }, + "QueueItem": { + "1": "e7a0998ee55ecb4f2f7b01b3eff4d337ae5b2827523e2ce6013ef9ac4ae19498" }, - "ProjectPoll": { "1": "qhkA5vgUMx+vk0TB1JAO73HXpFaqLzvNgzeN4WhprBQ=" }, - "Project": { "1": "tKhBH5IMhBm5zhnWIVjVsYEpWDMaeFd2rarja9A0Zms=" }, - "ProjectSubmit": { "1": "2Wh5J0B3lpFAPREF4HOkYeytjF6JERwAmNGeAaCRgag=" }, - "QueueItem": { "1": "pil5vSG1vRPWIKCK6dtwLeR4EVhrAcsgZRDwtdpBwwo=" }, "ZMQClientConfig": { - "1": "i4LNWTxwpNGlHCqk16BSqDwUZcus3V76WeE+IDbL9g8=" + "1": "08aedb35b045ea28754efe264e56dd3fe5583bc898f5e6b323505f00079415dc" }, - "Plan": { "1": "W/HvkLL9/FwwPdANAl8WOJ/Ib1x2UkdR0utu3T84byU=" } + "Plan": { + "1": "88e44bfd37e4963fc1d6ea1470beba00fa20bcb843a8fa41cd74634bb3e24962" + } }, - "hash": "NblGKIJFliKGb85F1K3f9sLThcZSAqVDp2RUlxyI9Jc=", + "hash": "d6f228425a6d680c745611308eb01902b88d3bf3269a6506d12cb3c2f4566dac", "supported": true } } From 2a8d0376a47e66d94e732aa56aed96fbe181b62d Mon Sep 17 00:00:00 2001 From: Shubham Gupta Date: Thu, 21 Sep 2023 14:48:55 +0530 Subject: [PATCH 24/67] - fix hash being inconsitent for object annotation - add property to list supported protocol version - add property to list latest protocol version - add method to validate current protocol state - update protocol_state_dev.json Co-authored-by: Kien Dang Co-authored-by: Peter Chung Co-authored-by: Khoa Nguyen --- notebooks/Experimental/Data Migration.ipynb | 138 +++++++++- .../syft/src/syft/protocol/data_protocol.py | 60 +++-- .../src/syft/protocol/protocol_state_dev.json | 236 +++++++++--------- 3 files changed, 299 insertions(+), 135 deletions(-) diff --git a/notebooks/Experimental/Data Migration.ipynb b/notebooks/Experimental/Data Migration.ipynb index c47b27c15a2..c8f1dc466d5 100644 --- a/notebooks/Experimental/Data Migration.ipynb +++ b/notebooks/Experimental/Data Migration.ipynb @@ -654,7 +654,6 @@ "# 
\"stale\": False \n", "# }\n", "\n", - "\n", " \n", "client: [1, 2, 3, 4]\n", "server: [5]\n", @@ -718,12 +717,12 @@ "4: {\n", "\n", " \"hash\": \"5235sad4\",\n", - " \"object_versions\": {\"User\": [2], \"NodeMetadata\": [1,2,3]},\n", + " \"object_versions\": {\"User\": [3], \"NodeMetadata\": [1,2,3]},\n", " \"stale\": False\n", "}\n", "5: {\n", " \"hash\": \"13124214\",\n", - " \"object_versions\": {\"User\": [1, 2, 3], \"NodeMetadata\": [1, 2, 3]},\n", + " \"object_versions\": {\"User\": [2, 3], \"NodeMetadata\": [1, 2, 3]},\n", " \"stale\": False\n", " \n", "}\n", @@ -738,6 +737,46 @@ "# communication protocol:\n", "# No intersection -> Highest of the intersection of server and client\n", "\n", + "\n", + "# client: [1]\n", + "# server: [1, 2]\n", + "# latest_protocol_on_server: 2\n", + "# latest_protocol_on_client: 1\n", + "# communication protocol: [1]\n", + "\n", + "\n", + "# Checking this on both client and server\n", + "if communication protocol < latest_protocol_on_server:\n", + " migration can happen on server\n", + "else:\n", + " migration can happens on client\n", + " \n", + "Based on the communication protocol derive,\n", + " - client_version for an object\n", + " - server_version = version in the latest_protocol\n", + " \n", + "\n", + "\n", + "@service_method():\n", + "def create(\n", + " self, \n", + " context: AuthContext, \n", + " create_user: Union[CreateUserV1, CreateUserV2]\n", + ") -> Union[CreateUserV1, CreateUserV2]:\n", + "\n", + " # we just need to take of care of \n", + " # data migrations of the table only\n", + " client_version = \n", + " server_version = \n", + "\n", + " create_user: UserV1 = user_create.migrate_to(server_version)\n", + "\n", + " user: UserV1 = self.stash.set(create_user, context.credentials)\n", + "\n", + " # no need for any migrations from \n", + " # Viewable/Intermediate Objects like UserViewV2 to UserViewV1\n", + " return user.migrate_to(client_version)\n", + "\n", "\"object_versions\": {\n", " \"User\": {\n", " 1: 
\"sadadefafa\", \n", @@ -753,6 +792,14 @@ "# )" ] }, + { + "cell_type": "code", + "execution_count": null, + "id": "3f9b679a", + "metadata": {}, + "outputs": [], + "source": [] + }, { "cell_type": "code", "execution_count": null, @@ -785,6 +832,21 @@ "# protocol.dev.json" ] }, + { + "cell_type": "code", + "execution_count": null, + "id": "791324b5", + "metadata": {}, + "outputs": [], + "source": [ + "# add a new version and test for upgrade_protocol and validate the communication b/w client and server \n", + "# two branches\n", + "# - first branch: add a new version to one object\n", + "# - second branch: add a new version to second object\n", + "# - merge branch: generate a final protocol state\n", + "# - validate the communication b/w client and server " + ] + }, { "cell_type": "code", "execution_count": null, @@ -797,6 +859,76 @@ " canonical_name\n", " protocol_version" ] + }, + { + "cell_type": "markdown", + "id": "d8642547", + "metadata": {}, + "source": [ + "### Implementing communication protocol" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "b0981778", + "metadata": {}, + "outputs": [], + "source": [ + "# client: [1]\n", + "# server: [1, 2]\n", + "# latest_protocol_on_server: 2\n", + "# latest_protocol_on_client: 1\n", + "# communication protocol: [1]\n", + "\n", + "\n", + "# Checking this on both client and server\n", + "if communication protocol < latest_protocol_on_server:\n", + " migration can happen on server\n", + "else:\n", + " migration can happens on client\n", + " \n", + "Based on the communication protocol derive,\n", + " - client_version for an object\n", + " - server_version = version in the latest_protocol\n", + " \n", + "\n", + "@service_method():\n", + "def create(\n", + " self, \n", + " context: AuthContext, \n", + " create_user: Union[CreateUserV1, CreateUserV2]\n", + ") -> Union[CreateUserV1, CreateUserV2]:\n", + "\n", + " # we just need to take of care of \n", + " # data migrations of the table only\n", + " 
client_version = \n", + " server_version = \n", + "\n", + " create_user: UserV1 = user_create.migrate_to(server_version)\n", + "\n", + " user: UserV1 = self.stash.set(create_user, context.credentials)\n", + "\n", + " # no need for any migrations from \n", + " # Viewable/Intermediate Objects like UserViewV2 to UserViewV1\n", + " return user.migrate_to(client_version)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "0fae05c9", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "55064bc7", + "metadata": {}, + "outputs": [], + "source": [] } ], "metadata": { diff --git a/packages/syft/src/syft/protocol/data_protocol.py b/packages/syft/src/syft/protocol/data_protocol.py index acaa5348364..04c3e488b0c 100644 --- a/packages/syft/src/syft/protocol/data_protocol.py +++ b/packages/syft/src/syft/protocol/data_protocol.py @@ -4,7 +4,6 @@ import os from pathlib import Path from typing import Dict -from typing import Hashable from typing import Type # relative @@ -31,8 +30,8 @@ def data_protocol_dir(): return os.path.abspath(str(Path(__file__).parent)) -def make_hash_sha256(hashable: Hashable) -> str: - return hashlib.sha256(bytes(hash(hashable))).hexdigest() +class InConsistentVersionException(Exception): + pass class DataProtocol: @@ -43,7 +42,7 @@ def __init__(self, filename: str) -> None: @staticmethod def _calculate_object_hash(klass: Type[SyftBaseObject]) -> str: field_data = { - field_name: hash(model_field.annotation) + field_name: repr(model_field.annotation) for field_name, model_field in klass.__fields__.items() } obj_meta_info = { @@ -80,8 +79,8 @@ def find_deleted_versions( current_object_to_version_map: Dict, new_object_to_version_map: Dict, ): - deleted_object_classes = set(current_object_to_version_map) - set( - new_object_to_version_map + deleted_object_classes = set(current_object_to_version_map).difference( + new_object_to_version_map.keys() ) deleted_versions_map = {} @@ 
-94,7 +93,7 @@ def find_deleted_versions( if current_versions is None: continue - deleted_versions = list(set(current_versions) - set(new_versions)) + deleted_versions = list(set(current_versions).difference(new_versions)) deleted_versions_map[canonical_name] = deleted_versions return deleted_object_classes, deleted_versions_map @@ -141,20 +140,23 @@ def recompute_supported_states( def state_defined(self): return len(self.state) > 0 + @property + def latest_version(self): + return max(self.state.keys()) + + @staticmethod + def _hash_to_sha256(obj_dict: Dict) -> str: + return hashlib.sha256(json.dumps(obj_dict).encode()).hexdigest() + def upgrade(self): object_to_version_map = self.calc_latest_object_versions() - new_protocol_hash = hashlib.sha256( - json.dumps(object_to_version_map).encode() - ).hexdigest() + new_protocol_hash = self._hash_to_sha256(object_to_version_map) if not self.state_defined: new_protocol_version = 1 else: # Find the current version - current_protocol_version = sorted( - self.state.keys(), - reverse=True, - )[0] + current_protocol_version = self.latest_version new_protocol_version = int(current_protocol_version) + 1 @@ -175,6 +177,36 @@ def upgrade(self): } self.save_state() + def validate_current_state(self) -> bool: + current_object_version_map = self.state[self.latest_version]["object_versions"] + inconsistent_versions = [] + + migration_registry = SyftMigrationRegistry.__migration_version_registry__ + for canonical_name in migration_registry: + available_versions = migration_registry[canonical_name] + curr_version_hash_map = current_object_version_map.get(canonical_name, {}) + for object_version, fqn in available_versions.items(): + object_klass = index_syft_by_module_name(fqn) + object_hash = self._calculate_object_hash(object_klass) + if curr_version_hash_map.get(str(object_version), None) != object_hash: + inconsistent_versions.append((canonical_name, object_version)) + + if len(inconsistent_versions) > 0: + raise 
InConsistentVersionException( + f"Version update is required for the following objects.\n {inconsistent_versions}" + ) + + return True + + @property + def supported_protocols(self): + """Returns a list of protocol numbers that are marked as supported.""" + return [ + int(protocol_version) + for protocol_version, protocol_state in self.state.items() + if protocol_state["supported"] + ] + def upgrade_protocol(): data_protocol = DataProtocol(filename=data_protocol_file_name()) diff --git a/packages/syft/src/syft/protocol/protocol_state_dev.json b/packages/syft/src/syft/protocol/protocol_state_dev.json index fbfb208c3bf..44a1c5f019c 100644 --- a/packages/syft/src/syft/protocol/protocol_state_dev.json +++ b/packages/syft/src/syft/protocol/protocol_state_dev.json @@ -2,356 +2,356 @@ "1": { "object_versions": { "SyftObject": { - "1": "3e4451972033459d3292997b6645b4762214d554ed290e4d629542dcf5a6e90d" + "1": "25a574002025025cfd155e3970305293e21fdd6af9dcde176990802306cc0359" }, "PartialSyftObject": { - "1": "2bc6d28d8ae433410b12733b7a7a3d0943933aa7ca529a2c96bfb82370490214" + "1": "fa2770d76f3dd904e2c1c4a78c9ba10e5ac33b4dd7a4d61faa45a22078e94aa8" }, "NodeServiceContext": { - "1": "457c30b8d6cca2db97a839b6d9e414c3d810d8c0100fa83fecaeac3c4cc760ae" + "1": "3d409e38765cd0be89a3d36718c0a34bdc9ed0a6d67d2e6d10895d88df8595e3" }, "AuthedServiceContext": { - "1": "f4a5acf9f2efd050b6ac218f4741fae7b6537cb79bf56be06ef03a272ca26bd5" + "1": "cec43782533403ab89a070a9548c4e9f4cd3bf028b4947fa9af7f83cb6c87d32" }, "UnauthedServiceContext": { - "1": "3d23424d2e55cb982d1904531f731d793d7cb5e2beebec67014cfcb9d5cb5c92" + "1": "f83f0d6ed8ef5f5b11090e2a8c56d4255ea2adeb828bb9f983cc007920c7d4a7" }, "NodeMetadataUpdate": { - "1": "47448bdb770590b51883b38f3e1037a433c89d09ace0b5ff0aca59837cde319c" + "1": "b73adf379a179f6ef9a6d5ee9477e69697a47ad25579bab46cc1fc44ec35ba04" }, "NodeMetadata": { - "1": "99298e7f56a7f1dc654efbf7eb5e27e763b77b9ac82f16effd4f0a513864cb09", - "2": 
"a2aca5be420371005a87051f5d25d9fff574484525efc0c5403dff6de210b812" + "1": "015a04cbfa19adbad7f606e0c419a1dc615a27eff7b76c426853ea3ca4eda860", + "2": "3986c7d8b3cf9355a1fbdd99dfe3d872fc464420b91a668ea3288ee4481bab6d" }, "LinkedObject": { - "1": "32ed44724ac6ac63f07e89a900e3ea7f0df55b99cec649d2d7b0b629d82bbee5" + "1": "8a117f8bf0282b8cf525de893404dbd88cc460a5a823d83006c2e82d88846f8d" }, "NodeConnection": { - "1": "8b053d85cf16fb1acb9681cf9036e521530ff459af84351c45d0f9344fc588c1" + "1": "9f2049387cf966342d16c295d4fa4012845f952399e9d1d96d77f62762f091e3" }, "APIEndpoint": { - "1": "241d24a9b4116d25990edc5bb51351bb8657e61afe8b1f2ce93e09ba246adc66" + "1": "c88e4405839e87fdfe90f86877ef2addd7be7281d36b7891636129fc8b3c1e8c" }, "SignedSyftAPICall": { - "1": "3ea546d2e55d7f3f26c7590240588304b07c342de750b8fa55da0ebae19f5439" + "1": "2be9b74663354b4edeef3bc75dc67dc35bf24890c8a86a53a97957d470af06b2" }, "SyftAPICall": { - "1": "041851168f212a0faf691862c11b805238047d07c3e44f31f79e0f31e7b0679e" + "1": "fa9520d29d9df56fb9d5d2080aecfc3be14c49e7267a3e5b9fd05ad1b0828e11" }, "SyftAPI": { - "1": "946d442e4d0b5bf31f8c0de6d8e022693d41c7a440f6c8429ef40758d404513e" + "1": "3f96e5d7a189258f66b6c119f835a0ae301843b0c016c634211f124ae83cd179" }, "User": { - "1": "1aa690bb2163536a2aebb6de73676e1b504bec744ab82a8dfabdad2ebd184d5f" + "1": "21cb3659dc4ddd0b4d58c677f46feecac9f682ea36660f42845067b29b5ad8e7" }, "UserUpdate": { - "1": "7ec9f3d5103ea8ed08268f17f095e2172d0e9e3bb696a7266f86831f97f00bd0" + "1": "f12c19dd38330f98fb2d9e0bf47e8bdcad3f6e1c085d2994d80224cf4b905984" }, "UserCreate": { - "1": "152991d21a29e7014001d77be45aa2a83ffb855464478eeb9e99712b9b9c1d3e" + "1": "a9d7a52aaa7dcf622e317e899e1ded3023a94b86773ca16cd7d6a334fcffbe8b" }, "UserSearch": { - "1": "ef1833ade6c67812702ac9ddf6d6ac61192cf962d6ec0232e8d593add7273d96" + "1": "e697bf5b287cf29560c94c5851d8fb6ac74d2ce5c6200539a11a257bc150c75b" }, "UserView": { - "1": "6bc4efdad3a28e009a12f3a865cbc11c77b187fdc4c8174e28d735e28d1a9af0" + "1": 
"fd624963af09c0e3471dfc49b2f09fafdd7521c8af804198015cc5455d7b56bc" }, "UserViewPage": { - "1": "78feb1d73aec6db844f2716d590701e082f021f48ba5852bb86781f77c0bdaeb" + "1": "c856f13ddc9a405b6d52482a6a273ffb038f988d589e7bb5cd68e0c8dd8668de" }, "UserPrivateKey": { - "1": "29547db4f2f1c3d10f35ce04adc7e4ca00f6e9041a6fce922da97ab5c43f1a18" + "1": "e06f237cdfd516caff0766b5f8ba4e4b2b85824c76910f1374ffce72173f8269" }, "NodeSettingsUpdate": { - "1": "3adf1ca892f50565c57503843ffd846093a74a0c09c40317d022e3cf2075cc7b" + "1": "ae77fb9f24004635a29abd979f5efa5a75780efe4fec1773cc533ac04aa57482" }, "NodeSettings": { - "1": "74a99ed759776b9625f3b0db2b15fcae62d07712af6c666cc533278e6eb5fe6f" + "1": "2735f660f23bfda8ffdf97b8ba3ab1bcdba01461b245832e9a9cb2a661ebcb74" }, "HTTPConnection": { - "1": "19d5ea982ea36531feb3ce932661a48f76268d45fe66a872c9744a5928f352e8" + "1": "0a6e181e67978ce45a2414be4c4c39272ca6ed38a5fe9e9877619c13dc6aafef" }, "PythonConnection": { - "1": "16b19771638bdd4810da5c9cba81c63a738840483c9219c077d280e201972f7b" + "1": "6ab9e80d2208ce44fb6e5db8427234680f56b1ef8092be701d6d833b6e213926" }, "DateTime": { - "1": "2de23e90acf8db16b89d64dbeefedbf14dcd8829fd0d969bd572f8211271ef5e" + "1": "34f9942a3f75988a1de6e46c40697698f2505336cf74282f683cfd3a7d6d4ec1" }, "BlobFile": { - "1": "b3d133f06c4817e4bb881322881f07c4567e931d194d2a116fbce3c191431fa7" + "1": "229121eb07430f72c66281764a690270ff821a6551c036528b9e749b343bedc1" }, "SecureFilePathLocation": { - "1": "2827f67e013eac3ccfd1a54300091d385765f144ad375c5d5b3eaf3679825c7b" + "1": "d3e6b95de5da0861922c302e9dabf443ee337b21da695d69c85bdb1e6f0ec45b" }, "SeaweedSecureFilePathLocation": { - "1": "b2e3cf9fe876e61979323203b5fd880330662b30910f0077b667fdc7cb0a441a" + "1": "0d5c382191c63e68b90237bb4e882abea6311ff1ba645adc784ee272de5f4623" }, "BlobStorageEntry": { - "1": "f20a0e15c251f3822ba35f13125ba96c895d1e55eeb7b8191ff98b84ac70ee31" + "1": "e010b50076f73fb934029a583310d13c3ec7abaa93520090fae3fb16457868fc" }, "BlobStorageMetadata": { - "1": 
"bf2f0769aa0a18b5d28d73c676288a15507e8c00b0325a80b38870a35337a17a" + "1": "f1d0b4085276ba5f15a8cd81553374d465317d96e0a1c427e33e2d866e362d22" }, "CreateBlobStorageEntry": { - "1": "f0a6ddd17072e8e69f6d65f851d3f029052f7ebb170432769d46fab25498a662" + "1": "183fd3ed16b0687f01979b3b76bbb17a9b029bd39f11d46faf54b3e5205e9e2d" }, "BlobRetrieval": { - "1": "a6c9369a6a8208ceccb71ca29a3391546aab3040c2088860b862d8d95f4f4acb" + "1": "c55f486ea79e9e96c047b32987e555357248cd30f18e1a8244fefe73457e5b9e" }, "SyftObjectRetrieval": { - "1": "238c5fbe27427ab9214370954053b3d0198c97497ba5c8b990782abb9f941615" + "1": "da96484c8e57fc060c4dba29ef4e375284720dd05f1ed2ee60e1df52450437cd" }, "BlobRetrievalByURL": { - "1": "8d7d5d4bd624e33bf214dd8cc9b453edda1e586288b990a4abbd3ce58de10485" + "1": "656a44e91ce560056679b459c9fd33b55a5c2b0754e455099a456074e2e14822" }, "BlobDeposit": { - "1": "d5fc7d669be5847ad5847c9e89d06ec37b65d31e29063437fb3ce187d05517bc" + "1": "23a73cc9bff8e6833681e55d872121b6d54520d76f9426fd199a19eb847deea4" }, "WorkerSettings": { - "1": "312f82db87d952c5b7815757d9b86fd4b348fb32d25cab2d32169b97b9a0c145" + "1": "2fe75dd39cb6367bd9cea2c7f59e40a85bbbcfc44f518572f377ef25c3acd205" }, "HTTPNodeRoute": { - "1": "3c9790fcc8bd19a6e439144f2bf651a3e6a1835663e592800c8df19a591489f0" + "1": "b4662c11f7487ab907caf3cadf8c33eca2e0fbd640ed1fba872c3f982b749986" }, "PythonNodeRoute": { - "1": "5f12a1198e41028c15ab6109082938e26f088f104ac4cc70b4b70f14b43bae69" + "1": "d8f268996e5443a248cc2eb5f4a568591e5f9e18952f8519f3f995e1f1f486e4" }, "EnclaveMetadata": { - "1": "a62e0bc349049c704bd3bf54dd5b4c57996ce965d23b4e70a52e83f67efe0417" + "1": "6ad19306231ebbb4d8b0c4e4cc82b881298835862a2c48f2358db47215a020ac" }, "DataSubject": { - "1": "24156a8cd0c672409bad05645f6507549d26c068922fbad3f9e90b0404b81b17" + "1": "287ab306e1c4ebe0c883600ffd52dc734d08f0217b2a961afbdc6c7883bf4ccd" }, "DataSubjectCreate": { - "1": "75055403f67e45663cb9a34d3b9038a6d377b06a2aad0695f92e878dbc119694" + "1": 
"8b3487af42ba249d7cf705c7f66a09dd832c055814091def34a38133f8148158" }, "DataSubjectMemberRelationship": { - "1": "dec77a2cb0aa2b126e86e95f5e8ca9b37263a9ab5084d208021c3afe6a58020e" + "1": "6aed0e1548d6b09bfac132040f7315f49c13855a6bc147a4f1aa4ce09572b952" }, "Contributor": { - "1": "b05268de04c54886b884b89a55d284fb26358e3f40c79d9b117780cafe5881be" + "1": "3e27f1ea23cecfc3e0956743ae832f0f70ecd144f9df3f128b123e9347944afa" }, "MarkdownDescription": { - "1": "abd4fdf31bd925fd899b665ef97f6710d4223dcfe49dda42f351d165581fb9e9" + "1": "506d47fa85728ad444f2fa657e39b341bc759d21a80325831b1e84926ee748f1" }, "Asset": { - "1": "a09f6f43b999c2ecf714f7b10a6a58bb2fe76fd4bdab43b1d17d7c2d93213b44" + "1": "f8370e8dd87df6a05bde1226c52c3ce6e7de636e6032341b977efe5e601a731d" }, "CreateAsset": { - "1": "832f8a9bd62bf7938afa987a250dc635eb9d48ef874183e1b2ed49ece4888a9d" + "1": "c3a82856694adcb3c916a961776c2fa3bc90a7ccb50b8b9d42776810266ed241" }, "Dataset": { - "1": "20b396058fbee41b3d3366003532d8afa2eeedfe5913d42b90f29d49d9eef4fd" + "1": "d7a45bf9128472715e63a67192d4181672eadd8b5873d2ba96c2df3b2be749b9" }, "DatasetPageView": { - "1": "813732868a2749802b963a7df453517be70e8772c8514f4b2de6173a44cd044b" + "1": "c7da1fac28f70c19d199f123b04fbd4a9c7681e3846dee0de70ea926a9440a2e" }, "CreateDataset": { - "1": "57fce597c56437396581234b80a8ad7ffe5cd59d92c7ea0febe907dd44233de5" + "1": "6a31617de99738bc176f19397884581051443c7c7ba952c983929fed688a9d7e" }, "ActionDataEmpty": { - "1": "0954c410666ea48683c62879f3e20fedd85ed08ea8ad3bcf8e24e9e158e04e89" + "1": "fc83d91ac6ba78c749a4e7e16c1aa117baaae62b5f33156ded4e5a79b9532a98" }, "ActionFileData": { - "1": "bdd2929f309babbc6ea32110852aa6af9a463840dcc5128a4127df06a5568221" + "1": "47a0a5f9fb48de09885d4f9a6c5a5a05f4dd04575ea42df7dea0cab02817063f" }, "Action": { - "1": "e4003f1c14ad472de8eac34f16c4264791f1d3202b89cf6317ba8adb3c10ebe8" + "1": "204b3c97b41f208ecb17c541af55e171675faaefa10c103b405a4077b0226a7e" }, "ActionObject": { - "1": 
"c6b428043d2e3dcd5fa3c3a5dec06fb20b114f1f24c46a9230dc5cd4d66eeda4" + "1": "8b2fcd90f6f05bb438ac2fc7fa80a480f1509990686c86a52c37db6448b869bf" }, "AnyActionObject": { - "1": "8c7da79f28694e27d56e1591f25a6e099dc5f747afe56aa0f04d09880de4d322" + "1": "f11bd6135ba46247c06bfa8a3c6b7f2e540a1033afe0512c3359e31eb3d59388" }, "TwinObject": { - "1": "d10bf03fe7f86687ebcf16aec661c0f82dc61b6ba15b3e977b73e37d4e2cca7a" + "1": "8f6abd756d41f9639865c6fd55e6cc2ec6b89fd18bac3c77bf3a0502d81ca8ec" }, "Policy": { - "1": "f08f964a0f81e13fcf3bd52be172c42b447f52343cd73613b62d5bb42fc2ca6e" + "1": "875c4ce86029c627cee2fdfc6f78807fc1a2a0c8ad58e3ffc5122b8d91f4bd1c" }, "InputPolicy": { - "1": "2f86e04f44a90ef19041b105202e90b8b5e15a5dd30f247272cb1a7a8e9abd30" + "1": "f096b87570dd66a0986dec6d52aa7e516a40186c6038a08c1eca737cb109935a" }, "ExactMatch": { - "1": "280bf2403596feeb9562ce04c10e39ae13a79e6fde9b342e74e998baa9e3931e" + "1": "f71a495d2452190596fe435eaf59b07ba390d7895f6c808a2f87a1719227ba9c" }, "OutputHistory": { - "1": "0f0d905c7653e11ac2de4033bfc7ae62b35ca1f018156fdad04229976da22c86" + "1": "24f282dd181ecc7a05c93c02dff84dff45aa52928e2331a15ef667645e9bcf0b" }, "OutputPolicy": { - "1": "6bf9910e98a2d9779a7cc8514f59a328a8a0ab802f82db438dc1fe4c2072d885" + "1": "f56d3427c23f1faf29e9bf4176c12c06d0b6f4d46e5d23f4b8e7c2b110c1b5c8" }, "OutputPolicyExecuteCount": { - "1": "cd61f8960e96e60b5faa047e4775edfb551581c34455fb5a014bf6a55eaa6719" + "1": "95d198d2021dbaf9965df39a4571ad06e787684ff79bd6e8a720c47825eebd7e" }, "OutputPolicyExecuteOnce": { - "1": "1325e7878ee6d724046cb671ffda37c23babc8c47815148624c3b04dec0bb47c" + "1": "b8bc1fea2e9b51b5dfc3cbd4b0a131cb2a5b1fe746b5e329395bf319b38bf9b2" }, "UserOutputPolicy": { - "1": "5d22fb45a21d75227ea5c755c88d7b1b87bc6305b2a2b15ff42ac4500e08100d" + "1": "a5e9e5f3b165d99c33b7de5c1ddf338baceda6f9f66d3b94bc92f2b47f52129d" }, "UserInputPolicy": { - "1": "ce16b03b7bbaa2b680185d7e35ec1555381e10e393586d9488827ce6c53abd7b" + "1": 
"a177ffaf9de5acede248c81ee55421f3f6aae696f8614c5d175a42d36fff1615" }, "UserPolicy": { - "1": "0a3488f0e5d24fb230b78e41f897310b3eaadbafc39358936b820d92908a06ae" + "1": "ec3f8ea84e2b05ce56db8f35cff14f78569e921f566477581b3299eb6a9fa053" }, "SubmitUserPolicy": { - "1": "5d4385336238a0b095a9225efbd7475fc697f8f4e28b6978ffa286e5b5e73e85" + "1": "3147a4e45270367a40ca8c4b7b502c8683200d123828720365521b90b2775794" }, "UserCode": { - "1": "5fa54bff122cfd5e481042dd5beb299c2b786ee746309f1a44caf58a5cda498a" + "1": "ad509fccb7bb5c00971453c1f1235785f40d5d5b3eee3df1dc9edafc758c7193" }, "SubmitUserCode": { - "1": "c90de22a6b765771caed7ab5b4b88fdf0b13886edb3f7e23bbbd0a46a9ab7899" + "1": "b4a919054beb2488c7b4839d60c494400d791112adf009631dce13787cd58e78" }, "UserCodeExecutionResult": { - "1": "e9d74d5c21fd91152441c892e671a080c8580e96bf352bcc8ceec6c22a638bde" + "1": "9aab24def6616ac908ef1100b654b4dbeca1ea4cfff229c53d6b14491c795555" }, "CodeHistory": { - "1": "0d27ce18a6ea2818b4f0f7ecabde63b9a322af3695dcecbb6185fe5415a30334" + "1": "bbbd59801717a98691034a37c9de388c7a65db2721bd0c5c9ff0dbe8fc70be67" }, "CodeHistoryView": { - "1": "f2ba91956dcac1e9244ee00b046dd604c91ac94c93119b1fe18a5f9f7411ebf2" + "1": "142e78415da10dae739e923d39ce511496a3c7b31e8c4553a6cbb1907c126a3a" }, "CodeHistoriesDict": { - "1": "5b23fe4675b6c2164a73500ed1e6a218ae82f89fffced6dfac4eafe3ac56f92f" + "1": "453af101a1de8e37a0bfacf22077c35994718791f295956f1f06727f8d9b7fe8" }, "UsersCodeHistoriesDict": { - "1": "acc66334b1789a3acfccef3d398f5474994de7e04269b4db3262b377444a55e1" + "1": "cf8ef92a08cabb068e4616c1700451b75ba4d511343830e3e56b65882fb784aa" }, "NodePeer": { - "1": "169d3a3028bddb140815fb8be3be0a63274ebede08b61ee21a707d200a5df456" + "1": "8920d9e456fd1a13f46c0528a8fe3fec8af46440b3eb89e8d7d48ad64babee1e" }, "ProxyClient": { - "1": "281e5a54fbf581c83e46a80dfe6f77e313e3dbaec95ad6c24e3c4d4b7e899961" + "1": "c9698a29223df0d477688293670f3a7a7d51556d4b2ed7e222e55350a0b053f4" }, "CommandReport": { - "1": 
"ad641d7a7691ff4e223bd8968d04d7329dca55b51cd440944aa0a155ed585d70" + "1": "a81fe3d0cc5796f45e925d09b6e8132b79fe5df0f341d55b3870c109f8c1e19d" }, "CommandResult": { - "1": "d673f70c20b176b9ab13ce64648cadc910fc5bfab610074480c26e2c31032197" + "1": "14b48d4a1cbc5f5ae1e5e74834e7f1002adae7b2766000ea822f180fd7cd39db" }, "VPNClientConnection": { - "1": "d7f59725a88a4b76d941139eff5dc6d14d13e0b13ccbcc66cc9b713661769d01" + "1": "7d44711978f930d42c06d04483abcdb1f230782a8f16f4feb5efb7b2b2093bb2" }, "HeadscaleAuthToken": { - "1": "6a3590bcd0a110fa485bb4178d630f177d9c534af856fad03f746156e239eb72" + "1": "0b363503b6c611b44e33561a2a6db3f260cfd4bbc5f4245deac5052fd5149803" }, "TailscalePeer": { - "1": "80206abb427762765dc3b18cf2e47f557594cbfb284adef40ac1eab65341db63" + "1": "8ff85aa2b913a6bb206b9de0495d9f74a17f55823891da98cb6fdbe78f46a44b" }, "TailscaleStatus": { - "1": "f36cfab93f50d14ed3493c7bdd1833d642b80b38bb861e33d3ba0b8bccc30eba" + "1": "ed262f4b9a569d9933f4a86cd2caa2ce213fc7a2319a1371f6a3cf3ccf884c8a" }, "OnDiskBlobDeposit": { - "1": "db940a4fd43f2b489875d27eab73193f0d69cc5eef7a754e99f83a4616ed5c35" + "1": "da3abda453def0d7c70c8a5dfcc3c8d00dd6822f60ddc01be3bdead4b0b5b482" }, "SeaweedFSBlobDeposit": { - "1": "8372147e6eb56adc347a6c6c70df40cca65a6ec0a4e2c7858362375da82d8ed0" + "1": "bcbec5dcdc06a0c87f89a10a6a8809706f24cedd97b5f850f8b48840a1f41941" }, "NumpyArrayObject": { - "1": "8e62c50b0f2dda5a2348fb0712136bcba753bb4392a6eb5952f3bcad2877a4d2" + "1": "d47a376401d92d47e5748e34f98ee270f8ebfd52cffbe6271b5faa8193e728c5" }, "NumpyScalarObject": { - "1": "05e4b92e020a5064ef376b5c8aa08883444fcd051d93403980ed6bf3a73201d9" + "1": "952bebb4dd3e3641c33b4ebcf2c051dbdebae5f1bf3b7b63ea89423360705411" }, "NumpyBoolObject": { - "1": "c83d6f2cf836e320cc338487f1921bc6abb27b2a6b6c0bbcad42f738bab39b6b" + "1": "b7a231baaa4b1f519d70c5afb15b4a9b7232f1128f7fd3709c1ea8b7345f8c6c" }, "PandasDataframeObject": { - "1": "2cea2e20683ff1abe355c15fcbb02722694bf9cd170bb13b69fdd0fb772de61c" + "1": 
"ff9d6c1884413f712d95d29190e30938b33de19e11dff9f88d9b89c51499cac5" }, "PandasSeriesObject": { - "1": "77d074c7f8a6e46c34ac7ec21aa7836928dc6492ca3e7df30a250bf7d47a4de2" + "1": "69eadfe14e5a7035767d2538e2db8775da6569cf5127f58d13315c4b85e5603d" }, "ReplyNotification": { - "1": "3695ca24420445654f4c2e62bac51f8a3f5899c45446b5a37e8c46625d51c8ea" + "1": "ce1e2a6b0d618478d3b1b992e4c8605817919c88a4884ca0540e0886ecdb8215" }, "Notification": { - "1": "69f470aa9f94651c237ccc6c2fb4e9aad7fa03196bcc7a21c3662877c43760ff" + "1": "1e5a65d91e27bf53d5b2ed0b45d9cee0cf77104b7111f99223194ceb0d0137fe" }, "CreateNotification": { - "1": "b0b9109841ae8c54bee62851ea4ff4e75aeaae0550ded16d6f71fab2f28118ba" + "1": "6858b743ac07d853a0302dc64de0e7d852135e2564ebad325d5ff35d17c29f6f" }, "Change": { - "1": "a90eefd804d636624bcb9503dd60c2f73e91a7afb3a5d569a6a5f099dcf49434" + "1": "2c470ff8aa076b88105640ce79d361a9b439927e501c238fa33ac7c1c45aa2c0" }, "ChangeStatus": { - "1": "bb5ce990ef34f2494abd3e2c82f2871cccc813c08af837ad929f05be9477071d" + "1": "7571229e92d8d52a0e90fa8856169b41045b42b50568b266823bdcea838dfb39" }, "ActionStoreChange": { - "1": "62bb0420bb5fcdad6d2cd2c628c059ace9030bfea3864f9a534cfa7ad8f7d7f8" + "1": "cf527995930cce09d90806713d30301493240079319bcc85e894428aee46017e" }, "Request": { - "1": "769a605653d6ddde60f0ff058dadfd6e136cf8f1f25e0b16f491b23fc85a8bf5" + "1": "340f4ac61ccbf6f566666327d6bca043dcd643e6f8e24897ef10bd6312e74995" }, "RequestInfo": { - "1": "c142149b3880adde9c48500acce2ddfa32f97e0917bbbd44834abcd7e054e018" + "1": "d571708de3c187ca5840c64784d99f7bfce8f33aa2ba48f9d56b824564551654" }, "RequestInfoFilter": { - "1": "1d91467abbca92b6bca664d697369e0d26187e338e0e96a9d765b2a1f616ed23" + "1": "c336af8d474071eb61e5f467513753e64d4e153e12892f9c6875b235057b0f0a" }, "SubmitRequest": { - "1": "3345ebf3a0ace3678eb6929294606143e3d1dec3da76843148b2ae773d86c471" + "1": "1870ce541169eab04cb69d3ae88ea30dc2fcdd997b620567ca9d87936d9600cf" }, "ObjectMutation": { - "1": 
"29fd705dd5c435b19191bc662ca1ec98726e446c173eb779f11af03501e98296" + "1": "275d9cf180904d1e34e1f3d7e838105e843127faf5a64029a1cf85d00234b8c9" }, "EnumMutation": { - "1": "792a9c1b533e02277db1650079d342cdcd34efc723462409e724a68c0cab6143" + "1": "3a1d1b47e0cdb5094298bce58bc9b76ecb66064459504f910c7d755eb1d5e276" }, "UserCodeStatusChange": { - "1": "c87a132da945ea099bd8f917081dbc1030d1c37c46f122112d37a65d0d357a00" + "1": "928dd4ceeb4858b18b806ca62b49a840f54269f7866744c9aa6edb0af9d7dfc1" }, "ProjectEvent": { - "1": "a3456153fec47f5aa62b6ee7ec8b4a620dea40ab91c05f8c5fd3a6c9b6921ed7" + "1": "97db1898509ff160c9defae6d02c7aa6e9623b7fbdf517263d9dc016881c5e01" }, "ProjectEventAddObject": { - "1": "a7f946535a6bf3421901457718276380eaad1bfb4f2ef0b0a4b0fbb0e6dc13f8" + "1": "c2241a4a66d04ed59334afde1f1fd2ce1384cb9b5862ad16eacb11ade868ea2f" }, "ProjectEventAddLink": { - "1": "3bb01deaca94356a1b18d2fe7e11bca7cdcf0c886dcea3afe24d10127f08bc86" + "1": "47e304a66bfa3ff89be5ea531de43ff72e8628f8feaca9c8934edad4a118b5f0" }, "ProjectSubEvent": { - "1": "dec533c0aa5dcd15dbffec58adfcb787580bc12bf918f300fa89bcd38934df88" + "1": "6dc448efa26bec63eb5e8127b5d541e40136a1a382e16e91db1d67b25bfe1213" }, "ProjectThreadMessage": { - "1": "4743c1dde5fb758ea2a204913dc94cc8185d3ef800aa9ac3a4406ca9599d794f" + "1": "9f8b11d603caae6d0e0f28957949dfc57c26fec9685f2c80501330b1d6bae665" }, "ProjectMessage": { - "1": "d5896bea45ed2a56e7eea718fc5b2ef82719e301ca8fb01ba6d7eb44b3b9659c" + "1": "d678beafc33f7e7df7e771a82d5cba6d5a36728a033d3976b6e5998726733d27" }, "ProjectRequestResponse": { - "1": "e086f9034fde676907bf6d3bb65f3ed08eed78f5fb099aa5741114b4c41b0b1f" + "1": "51b5a5d8cf0bde45abd2bd3a4411f93769fa542666a137ce9611d38fb48ffb4c" }, "ProjectRequest": { - "1": "9e3d9351857e9a95fe15ed6b562db2c96c59636a177d60a7ec44606a9263457a" + "1": "9eff1b3cc74c9706722363abb4062fc77c0a4f093d448b795ad662861649f111" }, "AnswerProjectPoll": { - "1": "7732c099f8ce9813124ef543836cb4993411f1335d92c8b90502563eebdf95e0" + "1": 
"f538a4fcae286cbc9755f51e2f2ce8809d66ce5d66f50173ef1824f89ce9b51d" }, "ProjectPoll": { - "1": "0747263d1c9b47db16aa3a95a5078e7d92e067ad1442b618a27f9c031e0a8470" + "1": "b456a699a249fd3fffe9739cdd9ec3ee8c05e59b2d9872ad9864167d78088091" }, "Project": { - "1": "57a7a11cf1afbec71dbdc6e19743a71370c96cfe0e295613bd5454acdd81355c" + "1": "bf59890e92d95b362cc7ef9c3d7fa6a1815978e02111a30cbcb047239e57d61e" }, "ProjectSubmit": { - "1": "dfe6df16db7d814cf04f4e93b763a8fed050ac157d838f14123608f23338d597" + "1": "5084844056ddefcea7fc634dd9945c03ef6d030bcd8f63aa07fe11fea0a5389f" }, "QueueItem": { - "1": "e7a0998ee55ecb4f2f7b01b3eff4d337ae5b2827523e2ce6013ef9ac4ae19498" + "1": "1d53446d5cd788120b15ea6b108a4a7abd480377370be7128f44297f8fb00b76" }, "ZMQClientConfig": { - "1": "08aedb35b045ea28754efe264e56dd3fe5583bc898f5e6b323505f00079415dc" + "1": "e3153f18c9fd04cf07b844153d093c8a090baac4c99d71ecd6491961e7f1dafb" }, "Plan": { - "1": "88e44bfd37e4963fc1d6ea1470beba00fa20bcb843a8fa41cd74634bb3e24962" + "1": "41713fc89a2cab7db592df6cd1c45e1309f86a50a8f531ddaf4052947186b0e0" } }, - "hash": "d6f228425a6d680c745611308eb01902b88d3bf3269a6506d12cb3c2f4566dac", + "hash": "63a765d7f47ae7cb3bac4045a9528d3e87c7330a987480bea18a117b9d6b6df8", "supported": true } } From ee03395ec75256690ce097ea5dac02733d254c53 Mon Sep 17 00:00:00 2001 From: Shubham Gupta Date: Sun, 24 Sep 2023 23:39:55 +0530 Subject: [PATCH 25/67] - added functionality to downgrade signature when API is fetched - calculate communication protocol on client initialization - pass communication protocol to get_api - add utility methods to data protocol and SyftMigrationRegistry Co-authored-by: Peter Chung Co-authored-by: Khoa Nguyen --- notebooks/Experimental/Data Migration.ipynb | 49 +++++++++--- packages/syft/src/syft/client/api.py | 78 ++++++++++++++++++- packages/syft/src/syft/client/client.py | 56 +++++++++++-- packages/syft/src/syft/node/node.py | 18 ++++- .../syft/src/syft/protocol/data_protocol.py | 19 +++-- 
.../syft/service/metadata/node_metadata.py | 13 +++- 6 files changed, 205 insertions(+), 28 deletions(-) diff --git a/notebooks/Experimental/Data Migration.ipynb b/notebooks/Experimental/Data Migration.ipynb index c8f1dc466d5..03251ecf02c 100644 --- a/notebooks/Experimental/Data Migration.ipynb +++ b/notebooks/Experimental/Data Migration.ipynb @@ -875,18 +875,49 @@ "metadata": {}, "outputs": [], "source": [ - "# client: [1]\n", - "# server: [1, 2]\n", - "# latest_protocol_on_server: 2\n", - "# latest_protocol_on_client: 1\n", - "# communication protocol: [1]\n", + "# Checking this on both client and server\n", "\n", "\n", - "# Checking this on both client and server\n", - "if communication protocol < latest_protocol_on_server:\n", - " migration can happen on server\n", - "else:\n", + "# During get_api, pass communication protocol\n", + "# on the server side:\n", + "# for_user(credentials, protocol):\n", + "# for given communication_protocol\n", + "# migrate the signature of the endpoints based on communication_protocol.\n", + "\n", + "if communication protocol < latest_protocol_on_client:\n", " migration can happens on client\n", + " # client: [1, 2]\n", + " # server: [1]\n", + " # latest_protocol_on_server: 1\n", + " # latest_protocol_on_client: 2\n", + " # communication protocol: [1]\n", + " # Iterate over the Input args and kwargs\n", + " # and downgrade the version to given communication protocol\n", + " user_v1 = user.using_protocol(version=communication_protocol)\n", + " # Recieve the result from the server and upgrade the version of the object\n", + " return user_v1.using_protocol(version=latest_protocol_on_client)\n", + "else:\n", + " migration can happen on server\n", + " # client: [1]\n", + " # server: [1, 2]\n", + " # latest_protocol_on_server: 2\n", + " # latest_protocol_on_client: 1\n", + " # communication protocol: [1]\n", + " migration_required = communication_protocol < latest_protocol_on_server\n", + " if migration_required:\n", + " user_v2 = 
user.using_protocol(version=latest_protocol_on_server)\n", + " # - object_version = get_object_version_for_protocol(\"User\", latest_protocol_on_server)\n", + " # get latest version of the object for given protocol\n", + " # - user.migrate_to(object_version)\n", + " ...\n", + " ...\n", + " ..\n", + " return user_v2.using_protocol(version=communication_protocol)\n", + " # - object_version = get_object_version_for_protocol(\"User\", latest_protocol_on_server)\n", + " # get latest version of the object for given protocol\n", + " # - user.migrate_to(object_version)\n", + "\n", + " \n", " \n", "Based on the communication protocol derive,\n", " - client_version for an object\n", diff --git a/packages/syft/src/syft/client/api.py b/packages/syft/src/syft/client/api.py index 1a69fce69aa..a8fc26c37c9 100644 --- a/packages/syft/src/syft/client/api.py +++ b/packages/syft/src/syft/client/api.py @@ -4,6 +4,7 @@ # stdlib from collections import OrderedDict import inspect +from inspect import Parameter from inspect import signature import types from typing import Any @@ -14,6 +15,7 @@ from typing import Tuple from typing import Union from typing import _GenericAlias +from typing import get_args # third party from nacl.exceptions import BadSignatureError @@ -26,6 +28,7 @@ from ..abstract_node import AbstractNode from ..node.credentials import SyftSigningKey from ..node.credentials import SyftVerifyKey +from ..protocol.data_protocol import get_data_protocol from ..serde.deserialize import _deserialize from ..serde.recursive import index_syft_by_module_name from ..serde.serializable import serializable @@ -46,6 +49,7 @@ from ..types.identity import Identity from ..types.syft_object import SYFT_OBJECT_VERSION_1 from ..types.syft_object import SyftBaseObject +from ..types.syft_object import SyftMigrationRegistry from ..types.syft_object import SyftObject from ..types.uid import LineageID from ..types.uid import UID @@ -375,6 +379,52 @@ def debox_signed_syftapicall_response( return 
signed_result.message.data +def downgrade_signature(signature: Signature, object_versions: List): + def migrate_annotation(annotation): + annotation_args = get_args(annotation) + annotation_to_migrate = annotation_args if annotation_args else [annotation] + + new_args = [] + for arg in annotation_to_migrate: + if isinstance(arg, SyftBaseObject): + versions = SyftMigrationRegistry.get_versions(arg.__canonical_name__) + downgrade_version = versions[ + str(max(object_versions[arg.__canonical_name__])) + ] + new_args.append(arg.migrate_to(downgrade_version)) + else: + new_args.append(arg) + + new_annotation = ( + annotation.copy_with(tuple(new_args)) if annotation_args else new_args[0] + ) + + return new_annotation + + migrated_parameters = [] + for _, parameter in signature.parameters.items(): + annotation = migrate_annotation(parameter.annotation) + migrated_parameter = Parameter( + name=parameter.name, + default=parameter.default, + annotation=annotation, + kind=parameter.kind, + ) + migrated_parameters.append(migrated_parameter) + + migrated_return_annotation = migrate_annotation(signature.return_annotation) + + try: + new_signature = Signature( + parameters=migrated_parameters, + return_annotation=migrated_return_annotation, + ) + except Exception as e: + raise e + + return new_signature + + @instrument @serializable(attrs=["endpoints", "node_uid", "node_name", "lib_endpoints"]) class SyftAPI(SyftObject): @@ -400,7 +450,9 @@ class SyftAPI(SyftObject): @staticmethod def for_user( - node: AbstractNode, user_verify_key: Optional[SyftVerifyKey] = None + node: AbstractNode, + user_verify_key: Optional[SyftVerifyKey] = None, + communication_protocol: Optional[int] = None, ) -> SyftAPI: # relative # TODO: Maybe there is a possibility of merging ServiceConfig and APIEndpoint @@ -417,6 +469,18 @@ def for_user( node=node, role=role, credentials=user_verify_key ) + # If server uses a higher protocol version than client, then + # signatures needs to be downgraded. 
+ signature_needs_downgrade = int(node.current_protocol) >= int( + communication_protocol + ) + data_protocol = get_data_protocol() + + if signature_needs_downgrade: + object_version_for_protocol = data_protocol.get_object_versions( + communication_protocol + ) + for ( path, service_config, @@ -426,13 +490,23 @@ def for_user( if service_warning: service_warning = service_warning.message_from(warning_context) service_warning.enabled = node.enable_warnings + + signature = ( + downgrade_signature( + signature=service_config.signature, + object_versions=object_version_for_protocol, + ) + if signature_needs_downgrade + else service_config.signature + ) + endpoint = APIEndpoint( service_path=path, module_path=path, name=service_config.public_name, description="", doc_string=service_config.doc_string, - signature=service_config.signature, + signature=signature, # TODO: Migrate signature based on communication protocol has_self=False, warning=service_warning, ) diff --git a/packages/syft/src/syft/client/client.py b/packages/syft/src/syft/client/client.py index 53687d36381..6c844ccfc02 100644 --- a/packages/syft/src/syft/client/client.py +++ b/packages/syft/src/syft/client/client.py @@ -34,6 +34,8 @@ from ..node.credentials import SyftSigningKey from ..node.credentials import SyftVerifyKey from ..node.credentials import UserLoginCredentials +from ..protocol.data_protocol import DataProtocol +from ..protocol.data_protocol import get_data_protocol from ..serde.deserialize import _deserialize from ..serde.serializable import serializable from ..serde.serialize import _serialize @@ -214,14 +216,22 @@ def get_node_metadata(self, credentials: SyftSigningKey) -> NodeMetadataJSON: metadata_json = json.loads(response) return NodeMetadataJSON(**metadata_json) - def get_api(self, credentials: SyftSigningKey) -> SyftAPI: - params = {"verify_key": str(credentials.verify_key)} + def get_api( + self, credentials: SyftSigningKey, communication_protocol: int + ) -> SyftAPI: + params = { + 
"verify_key": str(credentials.verify_key), + "communication_protocol": communication_protocol, + } if self.proxy_target_uid: obj = forward_message_to_proxy( self.make_call, proxy_target_uid=self.proxy_target_uid, path="api", - kwargs={"credentials": credentials}, + kwargs={ + "credentials": credentials, + "communication_protocol": communication_protocol, + }, credentials=credentials, ) else: @@ -333,18 +343,26 @@ def get_node_metadata(self, credentials: SyftSigningKey) -> NodeMetadataJSON: else: return self.node.metadata.to(NodeMetadataJSON) - def get_api(self, credentials: SyftSigningKey) -> SyftAPI: + def get_api( + self, credentials: SyftSigningKey, communication_protocol: int + ) -> SyftAPI: # todo: its a bit odd to identify a user by its verify key maybe? if self.proxy_target_uid: obj = forward_message_to_proxy( self.make_call, proxy_target_uid=self.proxy_target_uid, path="api", - kwargs={"credentials": credentials}, + kwargs={ + "credentials": credentials, + "communication_protocol": communication_protocol, + }, credentials=credentials, ) else: - obj = self.node.get_api(for_user=credentials.verify_key) + obj = self.node.get_api( + for_user=credentials.verify_key, + communication_protocol=communication_protocol, + ) obj.connection = self obj.signing_key = credentials if self.proxy_target_uid: @@ -444,6 +462,8 @@ def __init__( self.metadata = metadata self.credentials: Optional[SyftSigningKey] = credentials self._api = api + self.communication_protocol = None + self.current_protocol = None self.post_init() @@ -454,6 +474,25 @@ def post_init(self) -> None: if self.metadata is None: self._fetch_node_metadata(self.credentials) + self.communication_protocol = self.__get_communication_protocol( + self.metadata.supported_protocols + ) + + def __get_communication_protocol(self, protocols_supported_by_server: List) -> int: + data_protocol: DataProtocol = get_data_protocol() + protocols_supported_by_client: List[int] = data_protocol.supported_protocols + 
self.current_protocol = data_protocol.latest_version + common_protocols = set(protocols_supported_by_client).intersection( + protocols_supported_by_server + ) + + if len(common_protocols) == 0: + raise Exception( + "No common communication protocol found between the client and the server." + ) + + return max(common_protocols) + def create_project( self, name: str, description: str, user_email_address: str ) -> Any: @@ -749,7 +788,10 @@ def _fetch_node_metadata(self, credentials: SyftSigningKey) -> None: self.metadata = metadata def _fetch_api(self, credentials: SyftSigningKey): - _api: SyftAPI = self.connection.get_api(credentials=credentials) + _api: SyftAPI = self.connection.get_api( + credentials=credentials, + communication_protocol=self.communication_protocol, + ) def refresh_callback(): return self._fetch_api(self.credentials) diff --git a/packages/syft/src/syft/node/node.py b/packages/syft/src/syft/node/node.py index ae4fd2004bc..4c321324709 100644 --- a/packages/syft/src/syft/node/node.py +++ b/packages/syft/src/syft/node/node.py @@ -42,6 +42,7 @@ from ..client.api import SyftAPIData from ..client.api import debox_signed_syftapicall_response from ..external import OBLV +from ..protocol.data_protocol import get_data_protocol from ..serde.deserialize import _deserialize from ..serde.serialize import _serialize from ..service.action.action_service import ActionService @@ -455,6 +456,11 @@ def root_client(self): def guest_client(self): return self.get_guest_client() + @property + def current_protocol(self) -> List: + data_protocol = get_data_protocol() + return data_protocol.latest_version + def get_guest_client(self, verbose: bool = True): # relative from ..client.client import PythonConnection @@ -824,8 +830,16 @@ def handle_api_call_with_unsigned_result( return item return result - def get_api(self, for_user: Optional[SyftVerifyKey] = None) -> SyftAPI: - return SyftAPI.for_user(node=self, user_verify_key=for_user) + def get_api( + self, + for_user: 
Optional[SyftVerifyKey] = None, + communication_protocol: Optional[int] = None, + ) -> SyftAPI: + return SyftAPI.for_user( + node=self, + user_verify_key=for_user, + communication_protocol=communication_protocol, + ) def get_method_with_context( self, function: Callable, context: NodeServiceContext diff --git a/packages/syft/src/syft/protocol/data_protocol.py b/packages/syft/src/syft/protocol/data_protocol.py index 04c3e488b0c..a8ba3d6142f 100644 --- a/packages/syft/src/syft/protocol/data_protocol.py +++ b/packages/syft/src/syft/protocol/data_protocol.py @@ -4,7 +4,9 @@ import os from pathlib import Path from typing import Dict +from typing import List from typing import Type +from typing import Union # relative from ..types.syft_object import SyftBaseObject @@ -199,19 +201,22 @@ def validate_current_state(self) -> bool: return True @property - def supported_protocols(self): + def supported_protocols(self) -> List[int]: """Returns a list of protocol numbers that are marked as supported.""" return [ int(protocol_version) for protocol_version, protocol_state in self.state.items() - if protocol_state["supported"] + if str_to_bool(protocol_state["supported"]) ] + def get_object_versions(self, protocol: Union[int, str]) -> List: + return self.state[str(protocol)]["object_versions"] -def upgrade_protocol(): - data_protocol = DataProtocol(filename=data_protocol_file_name()) - data_protocol.upgrade() +def get_data_protocol(): + return DataProtocol(filename=data_protocol_file_name()) -def validate_protocol(): - pass + +def upgrade_protocol(): + data_protocol = get_data_protocol() + data_protocol.upgrade() diff --git a/packages/syft/src/syft/service/metadata/node_metadata.py b/packages/syft/src/syft/service/metadata/node_metadata.py index a27fe8a704e..4648e0dcbc2 100644 --- a/packages/syft/src/syft/service/metadata/node_metadata.py +++ b/packages/syft/src/syft/service/metadata/node_metadata.py @@ -3,16 +3,19 @@ # stdlib from typing import Callable +from typing import Dict 
from typing import List from typing import Optional # third party from packaging import version from pydantic import BaseModel +from pydantic import root_validator # relative from ...abstract_node import NodeType from ...node.credentials import SyftVerifyKey +from ...protocol.data_protocol import get_data_protocol from ...serde.serializable import serializable from ...types.syft_object import SYFT_OBJECT_VERSION_1 from ...types.syft_object import SYFT_OBJECT_VERSION_2 @@ -136,6 +139,14 @@ class NodeMetadataJSON(BaseModel, StorableObjectType): admin_email: str node_side_type: str show_warnings: bool + supported_protocols: List + + @root_validator(pre=True) + def add_protocol_versions(cls, values: Dict) -> Dict: + if "supported_protocols" not in values: + data_protocol = get_data_protocol() + values["supported_protocols"] = data_protocol.supported_protocols + return values def check_version(self, client_version: str) -> bool: return check_version( @@ -157,7 +168,7 @@ def metadata_to_json() -> List[Callable]: @transform(NodeMetadataJSON, NodeMetadata) def json_to_metadata() -> List[Callable]: return [ - drop(["metadata_version"]), + drop(["metadata_version", "supported_protocols"]), convert_types(["id", "verify_key"], [UID, SyftVerifyKey]), convert_types(["node_type"], NodeType), ] From 65a0bf87a385318e2b4ba004a2c2054614689daa Mon Sep 17 00:00:00 2001 From: Shubham Gupta Date: Mon, 25 Sep 2023 14:01:52 +0530 Subject: [PATCH 26/67] pass dev mode flag during protocol init and upgrade --- packages/syft/src/syft/client/api.py | 2 +- packages/syft/src/syft/protocol/data_protocol.py | 16 ++++++++++------ 2 files changed, 11 insertions(+), 7 deletions(-) diff --git a/packages/syft/src/syft/client/api.py b/packages/syft/src/syft/client/api.py index a8fc26c37c9..c3cc395ab21 100644 --- a/packages/syft/src/syft/client/api.py +++ b/packages/syft/src/syft/client/api.py @@ -471,7 +471,7 @@ def for_user( # If server uses a higher protocol version than client, then # signatures needs 
to be downgraded. - signature_needs_downgrade = int(node.current_protocol) >= int( + signature_needs_downgrade = int(node.current_protocol) > int( communication_protocol ) data_protocol = get_data_protocol() diff --git a/packages/syft/src/syft/protocol/data_protocol.py b/packages/syft/src/syft/protocol/data_protocol.py index a8ba3d6142f..b8ec810c056 100644 --- a/packages/syft/src/syft/protocol/data_protocol.py +++ b/packages/syft/src/syft/protocol/data_protocol.py @@ -24,8 +24,12 @@ def get_dev_mode() -> bool: PROTOCOL_STATE_FILENAME_DEV = "protocol_state_dev.json" -def data_protocol_file_name(): - return PROTOCOL_STATE_FILENAME_DEV if get_dev_mode() else PROTOCOL_STATE_FILENAME +def data_protocol_file_name(dev_mode: bool = False): + return ( + PROTOCOL_STATE_FILENAME_DEV + if (get_dev_mode() or dev_mode) + else PROTOCOL_STATE_FILENAME + ) def data_protocol_dir(): @@ -213,10 +217,10 @@ def get_object_versions(self, protocol: Union[int, str]) -> List: return self.state[str(protocol)]["object_versions"] -def get_data_protocol(): - return DataProtocol(filename=data_protocol_file_name()) +def get_data_protocol(dev_mode: bool = False): + return DataProtocol(filename=data_protocol_file_name(dev_mode=dev_mode)) -def upgrade_protocol(): - data_protocol = get_data_protocol() +def upgrade_protocol(dev_mode: bool = True): + data_protocol = get_data_protocol(dev_mode=dev_mode) data_protocol.upgrade() From ab8d8775a6535fe210ac79b10ab0d2f438d8428e Mon Sep 17 00:00:00 2001 From: Shubham Gupta Date: Tue, 26 Sep 2023 11:35:42 +0530 Subject: [PATCH 27/67] migrate object if version doesn't match with the one defined in annotation --- packages/syft/src/syft/client/api.py | 28 ++++++++++++++++++++++++++++ 1 file changed, 28 insertions(+) diff --git a/packages/syft/src/syft/client/api.py b/packages/syft/src/syft/client/api.py index c3cc395ab21..5067a62f7b7 100644 --- a/packages/syft/src/syft/client/api.py +++ b/packages/syft/src/syft/client/api.py @@ -815,6 +815,14 @@ def 
validate_callable_args_and_kwargs(args, kwargs, signature: Signature): if issubclass(v, EmailStr): v = str try: + annotation_version = v.__version__ + value = ( + value.migrate_to(annotation_version) + if isinstance(value, SyftBaseObject) + and annotation_version + else value + ) + check_type(key, value, v) # raises Exception success = True break # only need one to match @@ -823,6 +831,12 @@ def validate_callable_args_and_kwargs(args, kwargs, signature: Signature): if not success: raise TypeError() else: + annotation_version = t.__version__ + value = ( + value.migrate_to(annotation_version) + if isinstance(value, SyftBaseObject) and annotation_version + else value + ) check_type(key, value, t) # raises Exception except TypeError: _type_str = getattr(t, "__name__", str(t)) @@ -851,9 +865,23 @@ def validate_callable_args_and_kwargs(args, kwargs, signature: Signature): for v in t.__args__: if issubclass(v, EmailStr): v = str + annotation_version = v.__version__ + arg = ( + arg.migrate_to(annotation_version) + if isinstance(arg, SyftBaseObject) + and annotation_version + else arg + ) check_type(param_key, arg, v) # raises Exception break # only need one to match else: + annotation_version = t.__version__ + + arg = ( + arg.migrate_to(annotation_version) + if isinstance(value, SyftBaseObject) and annotation_version + else arg + ) check_type(param_key, arg, t) # raises Exception except TypeError: t_arg = type(arg) From cacfb1d383cb9d71c5e5ce2a23834814c2b39e19 Mon Sep 17 00:00:00 2001 From: Shubham Gupta Date: Tue, 26 Sep 2023 18:30:50 +0530 Subject: [PATCH 28/67] - pass communication protocol to SyftAPI - method to migrate args and kwargs for given protocol - migrate args and kwargs and result on making api call - migrate args and kwargs and result in service_method Co-authored-by: Kien Dang --- packages/syft/src/syft/client/api.py | 57 ++++++++----------- packages/syft/src/syft/client/client.py | 1 + packages/syft/src/syft/node/routes.py | 16 ++++-- 
.../syft/src/syft/protocol/data_protocol.py | 42 ++++++++++++++ packages/syft/src/syft/service/service.py | 11 ++++ packages/syft/src/syft/types/syft_object.py | 9 ++- 6 files changed, 98 insertions(+), 38 deletions(-) diff --git a/packages/syft/src/syft/client/api.py b/packages/syft/src/syft/client/api.py index 5067a62f7b7..3c046684994 100644 --- a/packages/syft/src/syft/client/api.py +++ b/packages/syft/src/syft/client/api.py @@ -29,6 +29,7 @@ from ..node.credentials import SyftSigningKey from ..node.credentials import SyftVerifyKey from ..protocol.data_protocol import get_data_protocol +from ..protocol.data_protocol import migrate_args_and_kwargs from ..serde.deserialize import _deserialize from ..serde.recursive import index_syft_by_module_name from ..serde.serializable import serializable @@ -210,6 +211,7 @@ def generate_remote_function( path: str, make_call: Callable, pre_kwargs: Dict[str, Any], + communication_protocol: int, warning: Optional[APIEndpointWarning], ): if "blocking" in signature.parameters: @@ -223,6 +225,11 @@ def wrapper(*args, **kwargs): blocking = bool(kwargs["blocking"]) del kwargs["blocking"] + # Migrate args and kwargs to communication protocol + args, kwargs = migrate_args_and_kwargs( + to_protocol=communication_protocol, args=args, kwargs=kwargs + ) + res = validate_callable_args_and_kwargs(args, kwargs, signature) if isinstance(res, SyftError): @@ -244,6 +251,11 @@ def wrapper(*args, **kwargs): if not allowed: return result = make_call(api_call=api_call) + + result, _ = migrate_args_and_kwargs( + [result], kwargs={}, to_latest_protocol=True + ) + result = result[0] return result wrapper.__ipython_inspector_signature_override__ = signature @@ -257,6 +269,7 @@ def generate_remote_lib_function( path: str, module_path: str, make_call: Callable, + communication_protocol: int, pre_kwargs: Dict[str, Any], ): if "blocking" in signature.parameters: @@ -444,6 +457,7 @@ class SyftAPI(SyftObject): # serde / storage rules refresh_api_callback: 
Optional[Callable] = None __user_role: ServiceRole = ServiceRole.NONE + communication_protocol: int # def __post_init__(self) -> None: # pass @@ -451,8 +465,8 @@ class SyftAPI(SyftObject): @staticmethod def for_user( node: AbstractNode, + communication_protocol: int, user_verify_key: Optional[SyftVerifyKey] = None, - communication_protocol: Optional[int] = None, ) -> SyftAPI: # relative # TODO: Maybe there is a possibility of merging ServiceConfig and APIEndpoint @@ -553,6 +567,7 @@ def for_user( endpoints=endpoints, lib_endpoints=lib_endpoints, __user_role=role, + communication_protocol=communication_protocol, ) @property @@ -606,7 +621,7 @@ def _add_route( _self._add_submodule(_last_module, endpoint_method) def generate_endpoints(self) -> None: - def build_endpoint_tree(endpoints): + def build_endpoint_tree(endpoints, communication_protocol): api_module = APIModule(path="") for _, v in endpoints.items(): signature = v.signature @@ -621,6 +636,7 @@ def build_endpoint_tree(endpoints): self.make_call, pre_kwargs=v.pre_kwargs, warning=v.warning, + communication_protocol=communication_protocol, ) elif isinstance(v, LibEndpoint): endpoint_function = generate_remote_lib_function( @@ -631,6 +647,7 @@ def build_endpoint_tree(endpoints): v.module_path, self.make_call, pre_kwargs=v.pre_kwargs, + communication_protocol=communication_protocol, ) endpoint_function.__doc__ = v.doc_string @@ -638,8 +655,12 @@ def build_endpoint_tree(endpoints): return api_module if self.lib_endpoints is not None: - self.libs = build_endpoint_tree(self.lib_endpoints) - self.api_module = build_endpoint_tree(self.endpoints) + self.libs = build_endpoint_tree( + self.lib_endpoints, self.communication_protocol + ) + self.api_module = build_endpoint_tree( + self.endpoints, self.communication_protocol + ) @property def services(self) -> APIModule: @@ -815,14 +836,6 @@ def validate_callable_args_and_kwargs(args, kwargs, signature: Signature): if issubclass(v, EmailStr): v = str try: - annotation_version 
= v.__version__ - value = ( - value.migrate_to(annotation_version) - if isinstance(value, SyftBaseObject) - and annotation_version - else value - ) - check_type(key, value, v) # raises Exception success = True break # only need one to match @@ -831,12 +844,6 @@ def validate_callable_args_and_kwargs(args, kwargs, signature: Signature): if not success: raise TypeError() else: - annotation_version = t.__version__ - value = ( - value.migrate_to(annotation_version) - if isinstance(value, SyftBaseObject) and annotation_version - else value - ) check_type(key, value, t) # raises Exception except TypeError: _type_str = getattr(t, "__name__", str(t)) @@ -865,23 +872,9 @@ def validate_callable_args_and_kwargs(args, kwargs, signature: Signature): for v in t.__args__: if issubclass(v, EmailStr): v = str - annotation_version = v.__version__ - arg = ( - arg.migrate_to(annotation_version) - if isinstance(arg, SyftBaseObject) - and annotation_version - else arg - ) check_type(param_key, arg, v) # raises Exception break # only need one to match else: - annotation_version = t.__version__ - - arg = ( - arg.migrate_to(annotation_version) - if isinstance(value, SyftBaseObject) and annotation_version - else arg - ) check_type(param_key, arg, t) # raises Exception except TypeError: t_arg = type(arg) diff --git a/packages/syft/src/syft/client/client.py b/packages/syft/src/syft/client/client.py index 6c844ccfc02..52a3b4bcb22 100644 --- a/packages/syft/src/syft/client/client.py +++ b/packages/syft/src/syft/client/client.py @@ -239,6 +239,7 @@ def get_api( obj = _deserialize(content, from_bytes=True) obj.connection = self obj.signing_key = credentials + obj.communication_protocol = communication_protocol if self.proxy_target_uid: obj.node_uid = self.proxy_target_uid return cast(SyftAPI, obj) diff --git a/packages/syft/src/syft/node/routes.py b/packages/syft/src/syft/node/routes.py index 8bbe0df8e6d..9bd90cb07c7 100644 --- a/packages/syft/src/syft/node/routes.py +++ 
b/packages/syft/src/syft/node/routes.py @@ -68,15 +68,21 @@ def syft_metadata_capnp() -> Response: media_type="application/octet-stream", ) - def handle_syft_new_api(user_verify_key: SyftVerifyKey) -> Response: + def handle_syft_new_api( + user_verify_key: SyftVerifyKey, communication_protocol: int + ) -> Response: return Response( - serialize(worker.get_api(user_verify_key), to_bytes=True), + serialize( + worker.get_api(user_verify_key, communication_protocol), to_bytes=True + ), media_type="application/octet-stream", ) # get the SyftAPI object @router.get("/api") - def syft_new_api(request: Request, verify_key: str) -> Response: + def syft_new_api( + request: Request, verify_key: str, communication_protocol: int + ) -> Response: user_verify_key: SyftVerifyKey = SyftVerifyKey.from_string(verify_key) if TRACE_MODE: with trace.get_tracer(syft_new_api.__module__).start_as_current_span( @@ -84,9 +90,9 @@ def syft_new_api(request: Request, verify_key: str) -> Response: context=extract(request.headers), kind=trace.SpanKind.SERVER, ): - return handle_syft_new_api(user_verify_key) + return handle_syft_new_api(user_verify_key, communication_protocol) else: - return handle_syft_new_api(user_verify_key) + return handle_syft_new_api(user_verify_key, communication_protocol) def handle_new_api_call(data: bytes) -> Response: obj_msg = deserialize(blob=data, from_bytes=True) diff --git a/packages/syft/src/syft/protocol/data_protocol.py b/packages/syft/src/syft/protocol/data_protocol.py index b8ec810c056..30ee5fb75d2 100644 --- a/packages/syft/src/syft/protocol/data_protocol.py +++ b/packages/syft/src/syft/protocol/data_protocol.py @@ -5,10 +5,13 @@ from pathlib import Path from typing import Dict from typing import List +from typing import Optional +from typing import Tuple from typing import Type from typing import Union # relative +from ..service.response import SyftException from ..types.syft_object import SyftBaseObject from ..types.syft_object import SyftMigrationRegistry 
from ..util.util import get_env @@ -224,3 +227,42 @@ def get_data_protocol(dev_mode: bool = False): def upgrade_protocol(dev_mode: bool = True): data_protocol = get_data_protocol(dev_mode=dev_mode) data_protocol.upgrade() + + +def migrate_args_and_kwargs( + args: Tuple, + kwargs: Dict, + to_protocol: Optional[int] = None, + to_latest_protocol: bool = False, +) -> Tuple[Tuple, Dict]: + """Migrate args and kwargs to latest version for given protocol. + + If `to_protocol` is None, then migrate to latest protocol version. + + """ + data_protocol = get_data_protocol() + + if to_protocol is None: + to_protocol = data_protocol.latest_version if to_latest_protocol else None + + if to_protocol is None: + raise SyftException(message="Protocol version missing.") + + object_versions = data_protocol.get_object_versions(protocol=to_protocol) + + migrated_kwargs, migrated_args = {}, [] + + for param_name, param_val in kwargs.items(): + if isinstance(param_val, SyftBaseObject): + migrate_to_version = max(object_versions[param_val.__canonical_name__]) + param_val = param_val.migrate_to(migrate_to_version) + migrated_kwargs[param_name] = param_val + + for arg in args: + if isinstance(arg, SyftBaseObject): + migrate_to_version = max(object_versions[arg.__canonical_name__]) + arg = param_val.migrate_to(migrate_to_version) + + migrated_args.append(arg) + + return tuple(migrated_args), migrated_kwargs diff --git a/packages/syft/src/syft/service/service.py b/packages/syft/src/syft/service/service.py index 974fe2d02b6..d6db472c152 100644 --- a/packages/syft/src/syft/service/service.py +++ b/packages/syft/src/syft/service/service.py @@ -21,6 +21,7 @@ # relative from ..abstract_node import AbstractNode from ..node.credentials import SyftVerifyKey +from ..protocol.data_protocol import migrate_args_and_kwargs from ..serde.lib_permissions import CMPCRUDPermission from ..serde.lib_permissions import CMPPermission from ..serde.lib_service_registry import CMPBase @@ -329,6 +330,11 @@ def 
wrapper(func): input_signature = deepcopy(signature) def _decorator(self, *args, **kwargs): + communication_protocol = kwargs.pop("communication_protocol", None) + if communication_protocol: + args, kwargs = migrate_args_and_kwargs( + *args, **kwargs, to_protocol=communication_protocol + ) if autosplat is not None and len(autosplat) > 0: args, kwargs = reconstruct_args_kwargs( signature=input_signature, @@ -337,6 +343,11 @@ def _decorator(self, *args, **kwargs): kwargs=kwargs, ) result = func(self, *args, **kwargs) + if communication_protocol: + result, _ = migrate_args_and_kwargs( + [result], kwargs={}, to_latest_protocol=True + ) + result = result[0] context = kwargs.get("context", None) context = args[0] if context is None else context attrs_to_attach = { diff --git a/packages/syft/src/syft/types/syft_object.py b/packages/syft/src/syft/types/syft_object.py index 69c02593308..468023ae33a 100644 --- a/packages/syft/src/syft/types/syft_object.py +++ b/packages/syft/src/syft/types/syft_object.py @@ -584,7 +584,14 @@ def migrate_to(self, version: int, context: Optional[Context] = None) -> Any: migration_transform = SyftMigrationRegistry.get_migration_for_version( type_from=type(self), version_to=version ) - return migration_transform(self, context) + return ( + migration_transform( + self, + context, + ) + if self.__version__ != version + else self + ) def short_qual_name(name: str) -> str: From 0f1c9da7fcb71f85342ee10adcbbdea5c8acff43 Mon Sep 17 00:00:00 2001 From: Shubham Gupta Date: Tue, 26 Sep 2023 18:42:45 +0530 Subject: [PATCH 29/67] skip migration if latest protocol are migration protocol are same --- packages/syft/src/syft/protocol/data_protocol.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/packages/syft/src/syft/protocol/data_protocol.py b/packages/syft/src/syft/protocol/data_protocol.py index 30ee5fb75d2..9d0e7b03c47 100644 --- a/packages/syft/src/syft/protocol/data_protocol.py +++ b/packages/syft/src/syft/protocol/data_protocol.py @@ -248,6 
+248,11 @@ def migrate_args_and_kwargs( if to_protocol is None: raise SyftException(message="Protocol version missing.") + # If latest protocol being used is equal to the protocol to be migrate + # then skip migration of the object + if to_protocol == data_protocol.latest_version: + return args, kwargs + object_versions = data_protocol.get_object_versions(protocol=to_protocol) migrated_kwargs, migrated_args = {}, [] From 8e03a82367575e3a4833806c5f1cf07cb4053ec6 Mon Sep 17 00:00:00 2001 From: Shubham Gupta Date: Wed, 27 Sep 2023 12:17:58 +0530 Subject: [PATCH 30/67] - pass communication protocol to SyftAPI - type cast protocol version to str while indexing - update migrate_args_and_kwargs to migrate objects via a range --- packages/syft/src/syft/client/api.py | 12 +++++++++++- packages/syft/src/syft/client/client.py | 1 + packages/syft/src/syft/node/node.py | 10 +++++----- .../syft/src/syft/protocol/data_protocol.py | 19 ++++++++++++------- .../src/syft/protocol/protocol_state_dev.json | 4 ++-- .../src/syft/service/metadata/__init__.py | 2 ++ .../syft/service/metadata/node_metadata.py | 16 +++------------- packages/syft/src/syft/service/service.py | 7 +++++-- 8 files changed, 41 insertions(+), 30 deletions(-) create mode 100644 packages/syft/src/syft/service/metadata/__init__.py diff --git a/packages/syft/src/syft/client/api.py b/packages/syft/src/syft/client/api.py index 3c046684994..f3350d51c32 100644 --- a/packages/syft/src/syft/client/api.py +++ b/packages/syft/src/syft/client/api.py @@ -239,6 +239,8 @@ def wrapper(*args, **kwargs): if pre_kwargs: _valid_kwargs.update(pre_kwargs) + _valid_kwargs["communication_protocol"] = communication_protocol + api_call = SyftAPICall( node_uid=node_uid, path=path, @@ -439,7 +441,15 @@ def migrate_annotation(annotation): @instrument -@serializable(attrs=["endpoints", "node_uid", "node_name", "lib_endpoints"]) +@serializable( + attrs=[ + "endpoints", + "node_uid", + "node_name", + "lib_endpoints", + "communication_protocol", + 
] +) class SyftAPI(SyftObject): # version __canonical_name__ = "SyftAPI" diff --git a/packages/syft/src/syft/client/client.py b/packages/syft/src/syft/client/client.py index 52a3b4bcb22..7dd75b5a056 100644 --- a/packages/syft/src/syft/client/client.py +++ b/packages/syft/src/syft/client/client.py @@ -366,6 +366,7 @@ def get_api( ) obj.connection = self obj.signing_key = credentials + obj.communication_protocol = communication_protocol if self.proxy_target_uid: obj.node_uid = self.proxy_target_uid return obj diff --git a/packages/syft/src/syft/node/node.py b/packages/syft/src/syft/node/node.py index 4c321324709..2b98ffe60b1 100644 --- a/packages/syft/src/syft/node/node.py +++ b/packages/syft/src/syft/node/node.py @@ -60,7 +60,7 @@ from ..service.dataset.dataset_service import DatasetService from ..service.enclave.enclave_service import EnclaveService from ..service.metadata.metadata_service import MetadataService -from ..service.metadata.node_metadata import NodeMetadata +from ..service.metadata.node_metadata import NodeMetadataV2 from ..service.network.network_service import NetworkService from ..service.notification.notification_service import NotificationService from ..service.policy.policy_service import PolicyService @@ -630,7 +630,7 @@ def _get_service_method_from_path(self, path: str) -> Callable: return getattr(service_obj, method_name) @property - def metadata(self) -> NodeMetadata: + def metadata(self) -> NodeMetadataV2: name = "" deployed_on = "" organization = "" @@ -653,12 +653,12 @@ def metadata(self) -> NodeMetadata: admin_email = settings_data.admin_email show_warnings = settings_data.show_warnings - return NodeMetadata( + return NodeMetadataV2( name=name, id=self.id, verify_key=self.verify_key, - highest_object_version=HIGHEST_SYFT_OBJECT_VERSION, - lowest_object_version=LOWEST_SYFT_OBJECT_VERSION, + highest_version=HIGHEST_SYFT_OBJECT_VERSION, + lowest_version=LOWEST_SYFT_OBJECT_VERSION, syft_version=__version__, deployed_on=deployed_on, 
description=description, diff --git a/packages/syft/src/syft/protocol/data_protocol.py b/packages/syft/src/syft/protocol/data_protocol.py index 9d0e7b03c47..2086a7929a8 100644 --- a/packages/syft/src/syft/protocol/data_protocol.py +++ b/packages/syft/src/syft/protocol/data_protocol.py @@ -112,7 +112,7 @@ def recompute_supported_states( current_protocol_version: int, new_object_to_version_map: Dict, ): - current_protocol_state = self.state[current_protocol_version] + current_protocol_state = self.state[str(current_protocol_version)] deleted_object_classes, deleted_versions_map = self.find_deleted_versions( current_protocol_state, new_object_to_version_map=new_object_to_version_map, @@ -151,7 +151,7 @@ def state_defined(self): @property def latest_version(self): - return max(self.state.keys()) + return int(max(self.state.keys())) @staticmethod def _hash_to_sha256(obj_dict: Dict) -> str: @@ -169,7 +169,7 @@ def upgrade(self): new_protocol_version = int(current_protocol_version) + 1 - current_protocol_state = self.state[current_protocol_version] + current_protocol_state = self.state[str(current_protocol_version)] if current_protocol_state["hash"] == new_protocol_hash: print("No change in schema. 
Skipping upgrade.") return @@ -259,14 +259,19 @@ def migrate_args_and_kwargs( for param_name, param_val in kwargs.items(): if isinstance(param_val, SyftBaseObject): - migrate_to_version = max(object_versions[param_val.__canonical_name__]) - param_val = param_val.migrate_to(migrate_to_version) + current_version = int(param_val.__version__) + migrate_to_version = int(max(object_versions[param_val.__canonical_name__])) + for version in range(current_version + 1, migrate_to_version + 1): + param_val = param_val.migrate_to(version) migrated_kwargs[param_name] = param_val for arg in args: if isinstance(arg, SyftBaseObject): - migrate_to_version = max(object_versions[arg.__canonical_name__]) - arg = param_val.migrate_to(migrate_to_version) + current_version = int(arg.__version__) + migrate_to_version = int(max(object_versions[arg.__canonical_name__])) + for version in range(current_version + 1, migrate_to_version + 1): + param_val = param_val.migrate_to(version) + arg = arg.migrate_to(migrate_to_version) migrated_args.append(arg) diff --git a/packages/syft/src/syft/protocol/protocol_state_dev.json b/packages/syft/src/syft/protocol/protocol_state_dev.json index 44a1c5f019c..11c13ba2800 100644 --- a/packages/syft/src/syft/protocol/protocol_state_dev.json +++ b/packages/syft/src/syft/protocol/protocol_state_dev.json @@ -39,7 +39,7 @@ "1": "fa9520d29d9df56fb9d5d2080aecfc3be14c49e7267a3e5b9fd05ad1b0828e11" }, "SyftAPI": { - "1": "3f96e5d7a189258f66b6c119f835a0ae301843b0c016c634211f124ae83cd179" + "1": "142a9bb5a4a63d5d072fbfbdbb48ec9535f1ae51e40f9d4f59760fb807c9a189" }, "User": { "1": "21cb3659dc4ddd0b4d58c677f46feecac9f682ea36660f42845067b29b5ad8e7" @@ -351,7 +351,7 @@ "1": "41713fc89a2cab7db592df6cd1c45e1309f86a50a8f531ddaf4052947186b0e0" } }, - "hash": "63a765d7f47ae7cb3bac4045a9528d3e87c7330a987480bea18a117b9d6b6df8", + "hash": "845f621ac334abbecb6ed7adcefa03faccaa9ee9dd722a1780e3bde9535093da", "supported": true } } diff --git 
a/packages/syft/src/syft/service/metadata/__init__.py b/packages/syft/src/syft/service/metadata/__init__.py new file mode 100644 index 00000000000..80eb6c422b0 --- /dev/null +++ b/packages/syft/src/syft/service/metadata/__init__.py @@ -0,0 +1,2 @@ +# relative +from .migrations import * # noqa: F403 diff --git a/packages/syft/src/syft/service/metadata/node_metadata.py b/packages/syft/src/syft/service/metadata/node_metadata.py index 4648e0dcbc2..016854f1a20 100644 --- a/packages/syft/src/syft/service/metadata/node_metadata.py +++ b/packages/syft/src/syft/service/metadata/node_metadata.py @@ -23,7 +23,6 @@ from ...types.syft_object import SyftObject from ...types.transforms import convert_types from ...types.transforms import drop -from ...types.transforms import rename from ...types.transforms import transform from ...types.uid import UID @@ -127,8 +126,8 @@ class NodeMetadataJSON(BaseModel, StorableObjectType): name: str id: str verify_key: str - highest_object_version: int - lowest_object_version: int + highest_object_version: Optional[int] + lowest_object_version: Optional[int] syft_version: str node_type: str = NodeType.DOMAIN.value deployed_on: str = "Date" @@ -156,16 +155,7 @@ def check_version(self, client_version: str) -> bool: ) -@transform(NodeMetadata, NodeMetadataJSON) -def metadata_to_json() -> List[Callable]: - return [ - drop(["__canonical_name__"]), - rename("__version__", "metadata_version"), - convert_types(["id", "verify_key", "node_type"], str), - ] - - -@transform(NodeMetadataJSON, NodeMetadata) +@transform(NodeMetadataJSON, NodeMetadataV2) def json_to_metadata() -> List[Callable]: return [ drop(["metadata_version", "supported_protocols"]), diff --git a/packages/syft/src/syft/service/service.py b/packages/syft/src/syft/service/service.py index d6db472c152..1011cddc1d8 100644 --- a/packages/syft/src/syft/service/service.py +++ b/packages/syft/src/syft/service/service.py @@ -331,9 +331,10 @@ def wrapper(func): def _decorator(self, *args, **kwargs): 
communication_protocol = kwargs.pop("communication_protocol", None) + if communication_protocol: args, kwargs = migrate_args_and_kwargs( - *args, **kwargs, to_protocol=communication_protocol + args=args, kwargs=kwargs, to_latest_protocol=True ) if autosplat is not None and len(autosplat) > 0: args, kwargs = reconstruct_args_kwargs( @@ -345,7 +346,9 @@ def _decorator(self, *args, **kwargs): result = func(self, *args, **kwargs) if communication_protocol: result, _ = migrate_args_and_kwargs( - [result], kwargs={}, to_latest_protocol=True + args=(result,), + kwargs={}, + to_protocol=communication_protocol, ) result = result[0] context = kwargs.get("context", None) From 3d5abc1a30145ac8cf2b795419f060206d315d49 Mon Sep 17 00:00:00 2001 From: Shubham Gupta Date: Wed, 27 Sep 2023 16:22:00 +0530 Subject: [PATCH 31/67] recursively unwrap annotation and migrate to correct version --- packages/syft/src/syft/client/api.py | 55 ++++++++++++--------- packages/syft/src/syft/types/syft_object.py | 6 +-- 2 files changed, 34 insertions(+), 27 deletions(-) diff --git a/packages/syft/src/syft/client/api.py b/packages/syft/src/syft/client/api.py index f3350d51c32..f48a8d02bba 100644 --- a/packages/syft/src/syft/client/api.py +++ b/packages/syft/src/syft/client/api.py @@ -394,31 +394,12 @@ def debox_signed_syftapicall_response( return signed_result.message.data -def downgrade_signature(signature: Signature, object_versions: List): - def migrate_annotation(annotation): - annotation_args = get_args(annotation) - annotation_to_migrate = annotation_args if annotation_args else [annotation] - - new_args = [] - for arg in annotation_to_migrate: - if isinstance(arg, SyftBaseObject): - versions = SyftMigrationRegistry.get_versions(arg.__canonical_name__) - downgrade_version = versions[ - str(max(object_versions[arg.__canonical_name__])) - ] - new_args.append(arg.migrate_to(downgrade_version)) - else: - new_args.append(arg) - - new_annotation = ( - annotation.copy_with(tuple(new_args)) if 
annotation_args else new_args[0] - ) - - return new_annotation - +def downgrade_signature(signature: Signature, object_versions: Dict): migrated_parameters = [] for _, parameter in signature.parameters.items(): - annotation = migrate_annotation(parameter.annotation) + annotation = unwrap_and_migrate_annotation( + parameter.annotation, object_versions + ) migrated_parameter = Parameter( name=parameter.name, default=parameter.default, @@ -427,7 +408,9 @@ def migrate_annotation(annotation): ) migrated_parameters.append(migrated_parameter) - migrated_return_annotation = migrate_annotation(signature.return_annotation) + migrated_return_annotation = unwrap_and_migrate_annotation( + signature.return_annotation, object_versions + ) try: new_signature = Signature( @@ -440,6 +423,30 @@ def migrate_annotation(annotation): return new_signature +def unwrap_and_migrate_annotation(annotation, object_versions): + args = get_args(annotation) + if len(args) == 0: + print(annotation) + if isinstance(annotation, type) and issubclass(annotation, SyftBaseObject): + downgrade_to_version = int( + max(object_versions[annotation.__canonical_name__]) + ) + downgrade_klass_name = SyftMigrationRegistry.__migration_version_registry__[ + annotation.__canonical_name__ + ][downgrade_to_version] + new_arg = index_syft_by_module_name(downgrade_klass_name) + return new_arg + else: + return annotation + + migrated_annotations = [] + for arg in args: + migrated_annotation = unwrap_and_migrate_annotation(arg, object_versions) + migrated_annotations.append(migrated_annotation) + + return annotation.copy_with(tuple(migrated_annotations)) + + @instrument @serializable( attrs=[ diff --git a/packages/syft/src/syft/types/syft_object.py b/packages/syft/src/syft/types/syft_object.py index 468023ae33a..c4146c610bd 100644 --- a/packages/syft/src/syft/types/syft_object.py +++ b/packages/syft/src/syft/types/syft_object.py @@ -229,11 +229,11 @@ def register_transform( """ Populate the 
__migration_transform_registry__ dictionary with format __migration_version_registry__ = { - "canonical_name": {"version_from x version_to": } + "canonical_name": {"version_from x version_to": } } For example - {'NodeMetadata': {'1x2': , - '2x1': }} + {'NodeMetadata': {'1x2': , + '2x1': }} """ if klass_type_str not in cls.__migration_version_registry__: raise Exception(f"{klass_type_str} is not yet registered.") From cda159a15455c7d25dcf18525dcf9be41f374807 Mon Sep 17 00:00:00 2001 From: Shubham Gupta Date: Wed, 27 Sep 2023 17:00:04 +0530 Subject: [PATCH 32/67] fix versioning calculation --- packages/syft/src/syft/protocol/data_protocol.py | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/packages/syft/src/syft/protocol/data_protocol.py b/packages/syft/src/syft/protocol/data_protocol.py index 2086a7929a8..1b781bc2469 100644 --- a/packages/syft/src/syft/protocol/data_protocol.py +++ b/packages/syft/src/syft/protocol/data_protocol.py @@ -261,7 +261,11 @@ def migrate_args_and_kwargs( if isinstance(param_val, SyftBaseObject): current_version = int(param_val.__version__) migrate_to_version = int(max(object_versions[param_val.__canonical_name__])) - for version in range(current_version + 1, migrate_to_version + 1): + if current_version > migrate_to_version: # downgrade + versions = range(current_version - 1, migrate_to_version - 1, -1) + else: # upgrade + versions = range(current_version + 1, migrate_to_version + 1) + for version in versions: param_val = param_val.migrate_to(version) migrated_kwargs[param_name] = param_val @@ -269,9 +273,12 @@ def migrate_args_and_kwargs( if isinstance(arg, SyftBaseObject): current_version = int(arg.__version__) migrate_to_version = int(max(object_versions[arg.__canonical_name__])) - for version in range(current_version + 1, migrate_to_version + 1): - param_val = param_val.migrate_to(version) - arg = arg.migrate_to(migrate_to_version) + if current_version > migrate_to_version: # downgrade + versions = 
range(current_version - 1, migrate_to_version - 1, -1) + else: # upgrade + versions = range(current_version + 1, migrate_to_version + 1) + for version in versions: + arg = arg.migrate_to(version) migrated_args.append(arg) From 4c9ba8bdc5ca31728d8fc67f4599fadaf58ba727 Mon Sep 17 00:00:00 2001 From: Shubham Gupta Date: Wed, 27 Sep 2023 17:43:47 +0530 Subject: [PATCH 33/67] add logic to validate and save migration state of objects in the partition --- packages/syft/src/syft/node/node.py | 17 +++++++++++ .../syft/src/syft/protocol/data_protocol.py | 2 ++ .../object_search/migration_state_service.py | 30 +++++++++++++++++++ .../object_search/object_migration_state.py | 6 ++-- 4 files changed, 53 insertions(+), 2 deletions(-) diff --git a/packages/syft/src/syft/node/node.py b/packages/syft/src/syft/node/node.py index 2b98ffe60b1..f7af15f36d3 100644 --- a/packages/syft/src/syft/node/node.py +++ b/packages/syft/src/syft/node/node.py @@ -63,6 +63,7 @@ from ..service.metadata.node_metadata import NodeMetadataV2 from ..service.network.network_service import NetworkService from ..service.notification.notification_service import NotificationService +from ..service.object_search.migration_state_service import MigrateStateService from ..service.policy.policy_service import PolicyService from ..service.project.project_service import ProjectService from ..service.queue.queue import APICallMessageHandler @@ -452,6 +453,22 @@ def root_client(self): root_client.api.refresh_api_callback() return root_client + def __validate_data_migration_state(self): + partition_to_be_migrated = [] + migration_state_service = self.get_service(MigrateStateService) + for partition_settings in self.document_store.partitions.values(): + object_type = partition_settings.object_type + canonical_name = object_type.__canonical_name__ + migration_state = migration_state_service.get_state(canonical_name) + if migration_state is not None: + if migration_state.current_version != migration_state.latest_version: + 
partition_to_be_migrated.append(canonical_name) + else: + migration_state.register_migration_state( + current_version=object_type.__version__, + canonical_name=canonical_name, + ) + @property def guest_client(self): return self.get_guest_client() diff --git a/packages/syft/src/syft/protocol/data_protocol.py b/packages/syft/src/syft/protocol/data_protocol.py index 1b781bc2469..3f9e3096553 100644 --- a/packages/syft/src/syft/protocol/data_protocol.py +++ b/packages/syft/src/syft/protocol/data_protocol.py @@ -12,6 +12,7 @@ # relative from ..service.response import SyftException +from ..service.response import SyftSuccess from ..types.syft_object import SyftBaseObject from ..types.syft_object import SyftMigrationRegistry from ..util.util import get_env @@ -185,6 +186,7 @@ def upgrade(self): "supported": True, } self.save_state() + return SyftSuccess(message="Protocol successfully updated !!") def validate_current_state(self) -> bool: current_object_version_map = self.state[self.latest_version]["object_versions"] diff --git a/packages/syft/src/syft/service/object_search/migration_state_service.py b/packages/syft/src/syft/service/object_search/migration_state_service.py index cad1338587c..2adbd1478bd 100644 --- a/packages/syft/src/syft/service/object_search/migration_state_service.py +++ b/packages/syft/src/syft/service/object_search/migration_state_service.py @@ -42,3 +42,33 @@ def get_version( ) return migration_state.current_version + + @service_method(path="migration", name="get_state") + def get_state( + self, context: AuthedServiceContext, canonical_name: str + ) -> Union[bool, SyftError]: + result = self.stash.get_by_name( + canonical_name=canonical_name, credentials=context.credentials + ) + + if result.is_err(): + return SyftError(message=f"{result.err()}") + + return result.ok() + + @service_method(path="migration", name="register_migration_state") + def register_migration_state( + self, + context: AuthedServiceContext, + current_version: int, + canonical_name: 
str, + ) -> Union[SyftObjectMigrationState, SyftError]: + obj = SyftObjectMigrationState( + current_version=current_version, canonical_name=canonical_name + ) + result = self.stash.set(migration_state=obj, credentials=context.credentials) + + if result.is_err(): + return SyftError(message=f"{result.err()}") + + return result.ok() diff --git a/packages/syft/src/syft/service/object_search/object_migration_state.py b/packages/syft/src/syft/service/object_search/object_migration_state.py index 17bc2b35232..aab7749bea5 100644 --- a/packages/syft/src/syft/service/object_search/object_migration_state.py +++ b/packages/syft/src/syft/service/object_search/object_migration_state.py @@ -24,6 +24,8 @@ class SyftObjectMigrationState(SyftObject): __canonical_name__ = "SyftObjectMigrationState" __version__ = SYFT_OBJECT_VERSION_1 + __attr_unique__ = ["canonical_name"] + canonical_name: str current_version: int @@ -58,10 +60,10 @@ def __init__(self, store: DocumentStore) -> None: def set( self, credentials: SyftVerifyKey, - syft_object_metadata: SyftObjectMigrationState, + migration_state: SyftObjectMigrationState, add_permissions: Optional[List[ActionObjectPermission]] = None, ) -> Result[SyftObjectMigrationState, str]: - res = self.check_type(syft_object_metadata, self.object_type) + res = self.check_type(migration_state, self.object_type) # we dont use and_then logic here as it is hard because of the order of the arguments if res.is_err(): return res From 7456d69f687fe3eda1ed98150a4689b6251ece1f Mon Sep 17 00:00:00 2001 From: Madhava Jay Date: Fri, 29 Sep 2023 17:33:48 +1000 Subject: [PATCH 34/67] WIP: Changed data structure for versioning so that its an additive log --- packages/syft/MANIFEST.in | 1 + packages/syft/src/syft/client/api.py | 3 + packages/syft/src/syft/node/node.py | 7 +- .../syft/src/syft/protocol/data_protocol.py | 511 ++++++++---- .../src/syft/protocol/protocol_state_dev.json | 357 -------- .../src/syft/protocol/protocol_version.json | 774 ++++++++++++++++++ 
.../src/syft/service/action/action_object.py | 3 + packages/syft/src/syft/service/context.py | 3 + .../syft/service/metadata/node_metadata.py | 10 + packages/syft/src/syft/service/service.py | 6 + .../src/syft/store/dict_document_store.py | 1 + .../src/syft/store/mongo_document_store.py | 4 + .../src/syft/store/sqlite_document_store.py | 1 + packages/syft/src/syft/types/syft_object.py | 8 +- packages/syft/tests/syft/hash_test.py | 4 + packages/syft/tests/syft/settings/fixtures.py | 7 +- 16 files changed, 1152 insertions(+), 548 deletions(-) delete mode 100644 packages/syft/src/syft/protocol/protocol_state_dev.json create mode 100644 packages/syft/src/syft/protocol/protocol_version.json diff --git a/packages/syft/MANIFEST.in b/packages/syft/MANIFEST.in index 28648461b0f..8cdc899ed4f 100644 --- a/packages/syft/MANIFEST.in +++ b/packages/syft/MANIFEST.in @@ -2,3 +2,4 @@ include src/syft/VERSION include src/syft/capnp/* include src/syft/cache/* include src/syft/img/* +include src/syft/protocol/protocol_version.json diff --git a/packages/syft/src/syft/client/api.py b/packages/syft/src/syft/client/api.py index f48a8d02bba..fe57d6e805b 100644 --- a/packages/syft/src/syft/client/api.py +++ b/packages/syft/src/syft/client/api.py @@ -115,6 +115,9 @@ class APIEndpoint(SyftObject): @serializable() class LibEndpoint(SyftBaseObject): + __canonical_name__ = "LibEndpoint" + __version__ = SYFT_OBJECT_VERSION_1 + # TODO: bad name, change service_path: str module_path: str diff --git a/packages/syft/src/syft/node/node.py b/packages/syft/src/syft/node/node.py index f7af15f36d3..9c8b5c5452e 100644 --- a/packages/syft/src/syft/node/node.py +++ b/packages/syft/src/syft/node/node.py @@ -92,8 +92,7 @@ from ..store.document_store import StoreConfig from ..store.sqlite_document_store import SQLiteStoreClientConfig from ..store.sqlite_document_store import SQLiteStoreConfig -from ..types.syft_object import HIGHEST_SYFT_OBJECT_VERSION -from ..types.syft_object import LOWEST_SYFT_OBJECT_VERSION 
+from ..types.syft_object import SYFT_OBJECT_VERSION_1 from ..types.syft_object import SyftObject from ..types.uid import UID from ..util.experimental_flags import flags @@ -674,8 +673,8 @@ def metadata(self) -> NodeMetadataV2: name=name, id=self.id, verify_key=self.verify_key, - highest_version=HIGHEST_SYFT_OBJECT_VERSION, - lowest_version=LOWEST_SYFT_OBJECT_VERSION, + highest_version=SYFT_OBJECT_VERSION_1, + lowest_version=SYFT_OBJECT_VERSION_1, syft_version=__version__, deployed_on=deployed_on, description=description, diff --git a/packages/syft/src/syft/protocol/data_protocol.py b/packages/syft/src/syft/protocol/data_protocol.py index 3f9e3096553..137b1fb885f 100644 --- a/packages/syft/src/syft/protocol/data_protocol.py +++ b/packages/syft/src/syft/protocol/data_protocol.py @@ -1,39 +1,78 @@ # stdlib +from collections import defaultdict import hashlib import json import os from pathlib import Path +import re from typing import Dict -from typing import List from typing import Optional from typing import Tuple from typing import Type -from typing import Union + +# third party +from result import Result # relative +from ..serde.recursive import TYPE_BANK +from ..service.response import SyftError from ..service.response import SyftException from ..service.response import SyftSuccess from ..types.syft_object import SyftBaseObject -from ..types.syft_object import SyftMigrationRegistry -from ..util.util import get_env -from ..util.util import index_syft_by_module_name -from ..util.util import str_to_bool - - -def get_dev_mode() -> bool: - return str_to_bool(get_env("DEV_MODE", "False")) -PROTOCOL_STATE_FILENAME = "protocol_state.json" -PROTOCOL_STATE_FILENAME_DEV = "protocol_state_dev.json" - - -def data_protocol_file_name(dev_mode: bool = False): - return ( - PROTOCOL_STATE_FILENAME_DEV - if (get_dev_mode() or dev_mode) - else PROTOCOL_STATE_FILENAME - ) +def natural_key(key: int | str) -> list[int]: + """Define key for natural ordering of strings.""" + if 
isinstance(key, int): + key = str(key) + return [int(s) if s.isdigit() else s for s in re.split("(\d+)", key)] + + +def sort_dict_naturally(d: dict) -> dict: + """Sort dictionary by keys in natural order.""" + return {k: d[k] for k in sorted(d.keys(), key=natural_key)} + + +def protocol_state_builder(protocol_dict: dict, stop_key: Optional[str] = None) -> dict: + sorted_dict = sort_dict_naturally(protocol_dict) + state_dict = defaultdict(dict) + for k, _v in sorted_dict.items(): + # stop early + if stop_key == k: + return state_dict + object_versions = sorted_dict[k]["object_versions"] + for canonical_name, versions in object_versions.items(): + for version, object_metadata in versions.items(): + action = object_metadata["action"] + version = object_metadata["version"] + hash_str = object_metadata["hash"] + state_versions = state_dict[canonical_name] + if action == "add" and ( + str(version) in state_versions.keys() + or hash_str in state_versions.values() + ): + raise Exception( + f"Can't add {object_metadata} already in state {versions}" + ) + elif action == "remove" and ( + str(version) not in state_versions.keys() + or hash_str not in state_versions.values() + ): + raise Exception( + f"Can't remove {object_metadata} missing from state {versions}" + ) + if action == "add": + state_dict[canonical_name][str(version)] = hash_str + elif action == "remove": + del state_dict[canonical_name][str(version)] + return state_dict + + +PROTOCOL_STATE_FILENAME = "protocol_version.json" + + +def data_protocol_file_name(): + return PROTOCOL_STATE_FILENAME def data_protocol_dir(): @@ -44,13 +83,85 @@ class InConsistentVersionException(Exception): pass +def diff_state(state: dict) -> dict: + object_diff = defaultdict(dict) + compare_dict = defaultdict(dict) + for k in TYPE_BANK: + ( + nonrecursive, + serialize, + deserialize, + attribute_list, + exclude_attrs_list, + serde_overrides, + hash_exclude_attrs, + cls, + attribute_types, + version, + ) = TYPE_BANK[k] + if 
issubclass(cls, SyftBaseObject): + canonical_name = cls.__canonical_name__ + hash_str = DataProtocol._calculate_object_hash(cls) + + # build this up for later + compare_dict[canonical_name][version] = hash_str + + if canonical_name not in state: + # new object so its an add + object_diff[canonical_name][str(version)] = {} + object_diff[canonical_name][str(version)]["version"] = version + object_diff[canonical_name][str(version)]["hash"] = hash_str + object_diff[canonical_name][str(version)]["action"] = "add" + continue + + versions = state[canonical_name] + if str(version) in versions.keys() and versions[str(version)] == hash_str: + # already there so do nothing + continue + elif str(version) in versions.keys(): + raise Exception( + f"{canonical_name} {cls} version {version} hash has changed. " + + f"{hash_str} not in {versions.values()}. " + + "You probably need to bump the version number." + ) + else: + # new object so its an add + object_diff[canonical_name][str(version)] = {} + object_diff[canonical_name][str(version)]["version"] = version + object_diff[canonical_name][str(version)]["hash"] = hash_str + object_diff[canonical_name][str(version)]["action"] = "add" + continue + + # now check for remove actions + for canonical_name in state: + for version, hash_str in state[canonical_name].items(): + if canonical_name not in compare_dict: + # missing so its a remove + object_diff[canonical_name][str(version)] = {} + object_diff[canonical_name][str(version)]["version"] = version + object_diff[canonical_name][str(version)]["hash"] = hash_str + object_diff[canonical_name][str(version)]["action"] = "remove" + continue + versions = compare_dict[canonical_name] + if str(version) in versions.keys(): + # missing so its a remove + object_diff[canonical_name][str(version)] = {} + object_diff[canonical_name][str(version)]["version"] = version + object_diff[canonical_name][str(version)]["hash"] = hash_str + object_diff[canonical_name][str(version)]["action"] = "remove" + 
continue + return object_diff + + class DataProtocol: def __init__(self, filename: str) -> None: self.file_path = Path(data_protocol_dir()) / filename - self.state = self.read_state() + self.protocol_history = self.read_history() + self.state = self.build_state() @staticmethod def _calculate_object_hash(klass: Type[SyftBaseObject]) -> str: + # TODO: this depends on what is marked as serde field_data = { field_name: repr(model_field.annotation) for field_name, model_field in klass.__fields__.items() @@ -64,170 +175,214 @@ def _calculate_object_hash(klass: Type[SyftBaseObject]) -> str: return hashlib.sha256(json.dumps(obj_meta_info).encode()).hexdigest() - def calc_latest_object_versions(self): - object_latest_version_map = {} - migration_registry = SyftMigrationRegistry.__migration_version_registry__ - for canonical_name in migration_registry: - available_versions = migration_registry[canonical_name] - version_obj_hash_map = {} - for object_version, fqn in available_versions.items(): - object_klass = index_syft_by_module_name(fqn) - object_hash = self._calculate_object_hash(object_klass) - version_obj_hash_map[object_version] = object_hash - object_latest_version_map[canonical_name] = version_obj_hash_map - - return object_latest_version_map - - def read_state(self) -> Dict: + # def calc_latest_object_versions(self): + # object_latest_version_map = {} + # migration_registry = SyftMigrationRegistry.__migration_version_registry__ + # for canonical_name in migration_registry: + # available_versions = migration_registry[canonical_name] + # version_obj_hash_map = {} + # for object_version, fqn in available_versions.items(): + # object_klass = index_syft_by_module_name(fqn) + # object_hash = self._calculate_object_hash(object_klass) + # version_obj_hash_map[object_version] = object_hash + # object_latest_version_map[canonical_name] = version_obj_hash_map + + # return object_latest_version_map + + def read_history(self) -> Dict: return 
json.loads(self.file_path.read_text()) - def save_state(self): - self.file_path.write_text(json.dumps(self.state)) - - def find_deleted_versions( - self, - current_object_to_version_map: Dict, - new_object_to_version_map: Dict, - ): - deleted_object_classes = set(current_object_to_version_map).difference( - new_object_to_version_map.keys() - ) - - deleted_versions_map = {} - - for canonical_name, new_versions in new_object_to_version_map.items(): - current_versions = current_object_to_version_map.get( - canonical_name, - None, - ) - if current_versions is None: - continue - - deleted_versions = list(set(current_versions).difference(new_versions)) - deleted_versions_map[canonical_name] = deleted_versions - - return deleted_object_classes, deleted_versions_map - - def recompute_supported_states( - self, - current_protocol_version: int, - new_object_to_version_map: Dict, - ): - current_protocol_state = self.state[str(current_protocol_version)] - deleted_object_classes, deleted_versions_map = self.find_deleted_versions( - current_protocol_state, - new_object_to_version_map=new_object_to_version_map, - ) - - for _, protocol_state in self.state.items(): - object_versions = protocol_state["object_versions"] - if protocol_state["supported"]: - continue - - # Check if any object class is deleted, - # then mark the protocol as not supported. 
- is_unsupported = any( - object_class in object_versions - for object_class in deleted_object_classes - ) - if is_unsupported: - protocol_state["supported"] = False - continue - - for object_class, supported_versions in deleted_versions_map.items(): - available_versions = object_versions.get(object_class, []) - unsupported_versions_present = set(available_versions).intersection( - supported_versions - ) - if unsupported_versions_present: - is_unsupported = True - break - - if is_unsupported: - protocol_state["supported"] = False - - @property - def state_defined(self): - return len(self.state) > 0 - - @property - def latest_version(self): - return int(max(self.state.keys())) + def save_history(self, history: dict) -> None: + self.file_path.write_text(json.dumps(history, indent=2)) + + # def find_deleted_versions( + # self, + # current_object_to_version_map: Dict, + # new_object_to_version_map: Dict, + # ): + # deleted_object_classes = set(current_object_to_version_map).difference( + # new_object_to_version_map.keys() + # ) + + # deleted_versions_map = {} + + # for canonical_name, new_versions in new_object_to_version_map.items(): + # current_versions = current_object_to_version_map.get( + # canonical_name, + # None, + # ) + # if current_versions is None: + # continue + + # deleted_versions = list(set(current_versions).difference(new_versions)) + # deleted_versions_map[canonical_name] = deleted_versions + + # return deleted_object_classes, deleted_versions_map + + # def recompute_supported_states( + # self, + # current_protocol_version: int, + # new_object_to_version_map: Dict, + # ): + # current_protocol_state = self.state[str(current_protocol_version)] + # deleted_object_classes, deleted_versions_map = self.find_deleted_versions( + # current_protocol_state, + # new_object_to_version_map=new_object_to_version_map, + # ) + + # for _, protocol_state in self.state.items(): + # object_versions = protocol_state["object_versions"] + # if protocol_state["supported"]: + # 
continue + + # # Check if any object class is deleted, + # # then mark the protocol as not supported. + # is_unsupported = any( + # object_class in object_versions + # for object_class in deleted_object_classes + # ) + # if is_unsupported: + # protocol_state["supported"] = False + # continue + + # for object_class, supported_versions in deleted_versions_map.items(): + # available_versions = object_versions.get(object_class, []) + # unsupported_versions_present = set(available_versions).intersection( + # supported_versions + # ) + # if unsupported_versions_present: + # is_unsupported = True + # break + + # if is_unsupported: + # protocol_state["supported"] = False + + # @property + # def state_defined(self): + # return len(self.state) > 0 + + # @property + # def latest_version(self): + # return int(max(self.state.keys())) @staticmethod def _hash_to_sha256(obj_dict: Dict) -> str: return hashlib.sha256(json.dumps(obj_dict).encode()).hexdigest() - def upgrade(self): - object_to_version_map = self.calc_latest_object_versions() - new_protocol_hash = self._hash_to_sha256(object_to_version_map) - - if not self.state_defined: - new_protocol_version = 1 - else: - # Find the current version - current_protocol_version = self.latest_version - - new_protocol_version = int(current_protocol_version) + 1 - - current_protocol_state = self.state[str(current_protocol_version)] - if current_protocol_state["hash"] == new_protocol_hash: - print("No change in schema. 
Skipping upgrade.") - return - - self.recompute_supported_states( - current_protocol_version=current_protocol_version, - new_object_to_version_map=object_to_version_map, - ) - - self.state[new_protocol_version] = { - "object_versions": object_to_version_map, - "hash": new_protocol_hash, - "supported": True, - } - self.save_state() - return SyftSuccess(message="Protocol successfully updated !!") - - def validate_current_state(self) -> bool: - current_object_version_map = self.state[self.latest_version]["object_versions"] - inconsistent_versions = [] - - migration_registry = SyftMigrationRegistry.__migration_version_registry__ - for canonical_name in migration_registry: - available_versions = migration_registry[canonical_name] - curr_version_hash_map = current_object_version_map.get(canonical_name, {}) - for object_version, fqn in available_versions.items(): - object_klass = index_syft_by_module_name(fqn) - object_hash = self._calculate_object_hash(object_klass) - if curr_version_hash_map.get(str(object_version), None) != object_hash: - inconsistent_versions.append((canonical_name, object_version)) - - if len(inconsistent_versions) > 0: - raise InConsistentVersionException( - f"Version update is required for the following objects.\n {inconsistent_versions}" - ) - - return True - - @property - def supported_protocols(self) -> List[int]: - """Returns a list of protocol numbers that are marked as supported.""" - return [ - int(protocol_version) - for protocol_version, protocol_state in self.state.items() - if str_to_bool(protocol_state["supported"]) - ] - - def get_object_versions(self, protocol: Union[int, str]) -> List: - return self.state[str(protocol)]["object_versions"] - - -def get_data_protocol(dev_mode: bool = False): - return DataProtocol(filename=data_protocol_file_name(dev_mode=dev_mode)) - - -def upgrade_protocol(dev_mode: bool = True): - data_protocol = get_data_protocol(dev_mode=dev_mode) + def build_state(self) -> dict: + return 
protocol_state_builder(self.protocol_history) + + def diff(self, state: dict) -> dict: + return diff_state(state) + + def upgrade(self) -> Result[SyftSuccess, SyftError]: + state = self.build_state() + print(">>> got state", state) + diff = self.diff(state) + print(">>> got diff", diff) + current_history = self.protocol_history + if "dev" not in current_history: + current_history["dev"] = {} + current_history["dev"]["object_versions"] = {} + object_versions = current_history["dev"]["object_versions"] + for canonical_name, versions in diff.items(): + for version, version_metadata in versions.items(): + if canonical_name not in object_versions: + object_versions[canonical_name] = {} + object_versions[canonical_name][version] = version_metadata + + current_history["dev"]["object_versions"] = object_versions + self.save_history(current_history) + + # def bump_version(self) -> Result[SyftSuccess, SyftError]: + # state = self.build_state() + # print(">>> got state", state) + # diff = self.diff(state) + # print(">>> got diff", diff) + # current_history = self.protocol_history + # if "dev" not in current_history: + # current_history["dev"] = {} + # current_history["dev"]["object_versions"] = {} + # object_versions = current_history["dev"]["object_versions"] + # for canonical_name, versions in diff.items(): + # for version, version_metadata in versions.items(): + # if canonical_name not in object_versions: + # object_versions[canonical_name] = {} + # object_versions[canonical_name][version] = version_metadata + + # current_history["dev"]["object_versions"] = object_versions + # self.save_history(current_history) + + # def upgrade(self): + # object_to_version_map = self.calc_latest_object_versions() + # new_protocol_hash = self._hash_to_sha256(object_to_version_map) + + # if not self.state_defined: + # new_protocol_version = 1 + # else: + # # Find the current version + # current_protocol_version = self.latest_version + + # new_protocol_version = int(current_protocol_version) 
+ 1 + + # current_protocol_state = self.state[str(current_protocol_version)] + # if current_protocol_state["hash"] == new_protocol_hash: + # print("No change in schema. Skipping upgrade.") + # return + + # self.recompute_supported_states( + # current_protocol_version=current_protocol_version, + # new_object_to_version_map=object_to_version_map, + # ) + + # self.state[new_protocol_version] = { + # "object_versions": object_to_version_map, + # "hash": new_protocol_hash, + # "supported": True, + # } + # self.save_state() + # return SyftSuccess(message="Protocol Updated") + + # def validate_current_state(self) -> bool: + # current_object_version_map = self.state[self.latest_version]["object_versions"] + # inconsistent_versions = [] + + # migration_registry = SyftMigrationRegistry.__migration_version_registry__ + # for canonical_name in migration_registry: + # available_versions = migration_registry[canonical_name] + # curr_version_hash_map = current_object_version_map.get(canonical_name, {}) + # for object_version, fqn in available_versions.items(): + # object_klass = index_syft_by_module_name(fqn) + # object_hash = self._calculate_object_hash(object_klass) + # if curr_version_hash_map.get(str(object_version), None) != object_hash: + # inconsistent_versions.append((canonical_name, object_version)) + + # if len(inconsistent_versions) > 0: + # raise InConsistentVersionException( + # f"Version update is required for the following objects.\n {inconsistent_versions}" + # ) + + # return True + + # @property + # def supported_protocols(self) -> List[int]: + # """Returns a list of protocol numbers that are marked as supported.""" + # return [ + # int(protocol_version) + # for protocol_version, protocol_state in self.state.items() + # if str_to_bool(protocol_state["supported"]) + # ] + + # def get_object_versions(self, protocol: Union[int, str]) -> List: + # return self.state[str(protocol)]["object_versions"] + + +def get_data_protocol(): + return 
DataProtocol(filename=data_protocol_file_name()) + + +def upgrade_protocol(): + data_protocol = get_data_protocol() data_protocol.upgrade() diff --git a/packages/syft/src/syft/protocol/protocol_state_dev.json b/packages/syft/src/syft/protocol/protocol_state_dev.json deleted file mode 100644 index 11c13ba2800..00000000000 --- a/packages/syft/src/syft/protocol/protocol_state_dev.json +++ /dev/null @@ -1,357 +0,0 @@ -{ - "1": { - "object_versions": { - "SyftObject": { - "1": "25a574002025025cfd155e3970305293e21fdd6af9dcde176990802306cc0359" - }, - "PartialSyftObject": { - "1": "fa2770d76f3dd904e2c1c4a78c9ba10e5ac33b4dd7a4d61faa45a22078e94aa8" - }, - "NodeServiceContext": { - "1": "3d409e38765cd0be89a3d36718c0a34bdc9ed0a6d67d2e6d10895d88df8595e3" - }, - "AuthedServiceContext": { - "1": "cec43782533403ab89a070a9548c4e9f4cd3bf028b4947fa9af7f83cb6c87d32" - }, - "UnauthedServiceContext": { - "1": "f83f0d6ed8ef5f5b11090e2a8c56d4255ea2adeb828bb9f983cc007920c7d4a7" - }, - "NodeMetadataUpdate": { - "1": "b73adf379a179f6ef9a6d5ee9477e69697a47ad25579bab46cc1fc44ec35ba04" - }, - "NodeMetadata": { - "1": "015a04cbfa19adbad7f606e0c419a1dc615a27eff7b76c426853ea3ca4eda860", - "2": "3986c7d8b3cf9355a1fbdd99dfe3d872fc464420b91a668ea3288ee4481bab6d" - }, - "LinkedObject": { - "1": "8a117f8bf0282b8cf525de893404dbd88cc460a5a823d83006c2e82d88846f8d" - }, - "NodeConnection": { - "1": "9f2049387cf966342d16c295d4fa4012845f952399e9d1d96d77f62762f091e3" - }, - "APIEndpoint": { - "1": "c88e4405839e87fdfe90f86877ef2addd7be7281d36b7891636129fc8b3c1e8c" - }, - "SignedSyftAPICall": { - "1": "2be9b74663354b4edeef3bc75dc67dc35bf24890c8a86a53a97957d470af06b2" - }, - "SyftAPICall": { - "1": "fa9520d29d9df56fb9d5d2080aecfc3be14c49e7267a3e5b9fd05ad1b0828e11" - }, - "SyftAPI": { - "1": "142a9bb5a4a63d5d072fbfbdbb48ec9535f1ae51e40f9d4f59760fb807c9a189" - }, - "User": { - "1": "21cb3659dc4ddd0b4d58c677f46feecac9f682ea36660f42845067b29b5ad8e7" - }, - "UserUpdate": { - "1": 
"f12c19dd38330f98fb2d9e0bf47e8bdcad3f6e1c085d2994d80224cf4b905984" - }, - "UserCreate": { - "1": "a9d7a52aaa7dcf622e317e899e1ded3023a94b86773ca16cd7d6a334fcffbe8b" - }, - "UserSearch": { - "1": "e697bf5b287cf29560c94c5851d8fb6ac74d2ce5c6200539a11a257bc150c75b" - }, - "UserView": { - "1": "fd624963af09c0e3471dfc49b2f09fafdd7521c8af804198015cc5455d7b56bc" - }, - "UserViewPage": { - "1": "c856f13ddc9a405b6d52482a6a273ffb038f988d589e7bb5cd68e0c8dd8668de" - }, - "UserPrivateKey": { - "1": "e06f237cdfd516caff0766b5f8ba4e4b2b85824c76910f1374ffce72173f8269" - }, - "NodeSettingsUpdate": { - "1": "ae77fb9f24004635a29abd979f5efa5a75780efe4fec1773cc533ac04aa57482" - }, - "NodeSettings": { - "1": "2735f660f23bfda8ffdf97b8ba3ab1bcdba01461b245832e9a9cb2a661ebcb74" - }, - "HTTPConnection": { - "1": "0a6e181e67978ce45a2414be4c4c39272ca6ed38a5fe9e9877619c13dc6aafef" - }, - "PythonConnection": { - "1": "6ab9e80d2208ce44fb6e5db8427234680f56b1ef8092be701d6d833b6e213926" - }, - "DateTime": { - "1": "34f9942a3f75988a1de6e46c40697698f2505336cf74282f683cfd3a7d6d4ec1" - }, - "BlobFile": { - "1": "229121eb07430f72c66281764a690270ff821a6551c036528b9e749b343bedc1" - }, - "SecureFilePathLocation": { - "1": "d3e6b95de5da0861922c302e9dabf443ee337b21da695d69c85bdb1e6f0ec45b" - }, - "SeaweedSecureFilePathLocation": { - "1": "0d5c382191c63e68b90237bb4e882abea6311ff1ba645adc784ee272de5f4623" - }, - "BlobStorageEntry": { - "1": "e010b50076f73fb934029a583310d13c3ec7abaa93520090fae3fb16457868fc" - }, - "BlobStorageMetadata": { - "1": "f1d0b4085276ba5f15a8cd81553374d465317d96e0a1c427e33e2d866e362d22" - }, - "CreateBlobStorageEntry": { - "1": "183fd3ed16b0687f01979b3b76bbb17a9b029bd39f11d46faf54b3e5205e9e2d" - }, - "BlobRetrieval": { - "1": "c55f486ea79e9e96c047b32987e555357248cd30f18e1a8244fefe73457e5b9e" - }, - "SyftObjectRetrieval": { - "1": "da96484c8e57fc060c4dba29ef4e375284720dd05f1ed2ee60e1df52450437cd" - }, - "BlobRetrievalByURL": { - "1": 
"656a44e91ce560056679b459c9fd33b55a5c2b0754e455099a456074e2e14822" - }, - "BlobDeposit": { - "1": "23a73cc9bff8e6833681e55d872121b6d54520d76f9426fd199a19eb847deea4" - }, - "WorkerSettings": { - "1": "2fe75dd39cb6367bd9cea2c7f59e40a85bbbcfc44f518572f377ef25c3acd205" - }, - "HTTPNodeRoute": { - "1": "b4662c11f7487ab907caf3cadf8c33eca2e0fbd640ed1fba872c3f982b749986" - }, - "PythonNodeRoute": { - "1": "d8f268996e5443a248cc2eb5f4a568591e5f9e18952f8519f3f995e1f1f486e4" - }, - "EnclaveMetadata": { - "1": "6ad19306231ebbb4d8b0c4e4cc82b881298835862a2c48f2358db47215a020ac" - }, - "DataSubject": { - "1": "287ab306e1c4ebe0c883600ffd52dc734d08f0217b2a961afbdc6c7883bf4ccd" - }, - "DataSubjectCreate": { - "1": "8b3487af42ba249d7cf705c7f66a09dd832c055814091def34a38133f8148158" - }, - "DataSubjectMemberRelationship": { - "1": "6aed0e1548d6b09bfac132040f7315f49c13855a6bc147a4f1aa4ce09572b952" - }, - "Contributor": { - "1": "3e27f1ea23cecfc3e0956743ae832f0f70ecd144f9df3f128b123e9347944afa" - }, - "MarkdownDescription": { - "1": "506d47fa85728ad444f2fa657e39b341bc759d21a80325831b1e84926ee748f1" - }, - "Asset": { - "1": "f8370e8dd87df6a05bde1226c52c3ce6e7de636e6032341b977efe5e601a731d" - }, - "CreateAsset": { - "1": "c3a82856694adcb3c916a961776c2fa3bc90a7ccb50b8b9d42776810266ed241" - }, - "Dataset": { - "1": "d7a45bf9128472715e63a67192d4181672eadd8b5873d2ba96c2df3b2be749b9" - }, - "DatasetPageView": { - "1": "c7da1fac28f70c19d199f123b04fbd4a9c7681e3846dee0de70ea926a9440a2e" - }, - "CreateDataset": { - "1": "6a31617de99738bc176f19397884581051443c7c7ba952c983929fed688a9d7e" - }, - "ActionDataEmpty": { - "1": "fc83d91ac6ba78c749a4e7e16c1aa117baaae62b5f33156ded4e5a79b9532a98" - }, - "ActionFileData": { - "1": "47a0a5f9fb48de09885d4f9a6c5a5a05f4dd04575ea42df7dea0cab02817063f" - }, - "Action": { - "1": "204b3c97b41f208ecb17c541af55e171675faaefa10c103b405a4077b0226a7e" - }, - "ActionObject": { - "1": "8b2fcd90f6f05bb438ac2fc7fa80a480f1509990686c86a52c37db6448b869bf" - }, - "AnyActionObject": 
{ - "1": "f11bd6135ba46247c06bfa8a3c6b7f2e540a1033afe0512c3359e31eb3d59388" - }, - "TwinObject": { - "1": "8f6abd756d41f9639865c6fd55e6cc2ec6b89fd18bac3c77bf3a0502d81ca8ec" - }, - "Policy": { - "1": "875c4ce86029c627cee2fdfc6f78807fc1a2a0c8ad58e3ffc5122b8d91f4bd1c" - }, - "InputPolicy": { - "1": "f096b87570dd66a0986dec6d52aa7e516a40186c6038a08c1eca737cb109935a" - }, - "ExactMatch": { - "1": "f71a495d2452190596fe435eaf59b07ba390d7895f6c808a2f87a1719227ba9c" - }, - "OutputHistory": { - "1": "24f282dd181ecc7a05c93c02dff84dff45aa52928e2331a15ef667645e9bcf0b" - }, - "OutputPolicy": { - "1": "f56d3427c23f1faf29e9bf4176c12c06d0b6f4d46e5d23f4b8e7c2b110c1b5c8" - }, - "OutputPolicyExecuteCount": { - "1": "95d198d2021dbaf9965df39a4571ad06e787684ff79bd6e8a720c47825eebd7e" - }, - "OutputPolicyExecuteOnce": { - "1": "b8bc1fea2e9b51b5dfc3cbd4b0a131cb2a5b1fe746b5e329395bf319b38bf9b2" - }, - "UserOutputPolicy": { - "1": "a5e9e5f3b165d99c33b7de5c1ddf338baceda6f9f66d3b94bc92f2b47f52129d" - }, - "UserInputPolicy": { - "1": "a177ffaf9de5acede248c81ee55421f3f6aae696f8614c5d175a42d36fff1615" - }, - "UserPolicy": { - "1": "ec3f8ea84e2b05ce56db8f35cff14f78569e921f566477581b3299eb6a9fa053" - }, - "SubmitUserPolicy": { - "1": "3147a4e45270367a40ca8c4b7b502c8683200d123828720365521b90b2775794" - }, - "UserCode": { - "1": "ad509fccb7bb5c00971453c1f1235785f40d5d5b3eee3df1dc9edafc758c7193" - }, - "SubmitUserCode": { - "1": "b4a919054beb2488c7b4839d60c494400d791112adf009631dce13787cd58e78" - }, - "UserCodeExecutionResult": { - "1": "9aab24def6616ac908ef1100b654b4dbeca1ea4cfff229c53d6b14491c795555" - }, - "CodeHistory": { - "1": "bbbd59801717a98691034a37c9de388c7a65db2721bd0c5c9ff0dbe8fc70be67" - }, - "CodeHistoryView": { - "1": "142e78415da10dae739e923d39ce511496a3c7b31e8c4553a6cbb1907c126a3a" - }, - "CodeHistoriesDict": { - "1": "453af101a1de8e37a0bfacf22077c35994718791f295956f1f06727f8d9b7fe8" - }, - "UsersCodeHistoriesDict": { - "1": 
"cf8ef92a08cabb068e4616c1700451b75ba4d511343830e3e56b65882fb784aa" - }, - "NodePeer": { - "1": "8920d9e456fd1a13f46c0528a8fe3fec8af46440b3eb89e8d7d48ad64babee1e" - }, - "ProxyClient": { - "1": "c9698a29223df0d477688293670f3a7a7d51556d4b2ed7e222e55350a0b053f4" - }, - "CommandReport": { - "1": "a81fe3d0cc5796f45e925d09b6e8132b79fe5df0f341d55b3870c109f8c1e19d" - }, - "CommandResult": { - "1": "14b48d4a1cbc5f5ae1e5e74834e7f1002adae7b2766000ea822f180fd7cd39db" - }, - "VPNClientConnection": { - "1": "7d44711978f930d42c06d04483abcdb1f230782a8f16f4feb5efb7b2b2093bb2" - }, - "HeadscaleAuthToken": { - "1": "0b363503b6c611b44e33561a2a6db3f260cfd4bbc5f4245deac5052fd5149803" - }, - "TailscalePeer": { - "1": "8ff85aa2b913a6bb206b9de0495d9f74a17f55823891da98cb6fdbe78f46a44b" - }, - "TailscaleStatus": { - "1": "ed262f4b9a569d9933f4a86cd2caa2ce213fc7a2319a1371f6a3cf3ccf884c8a" - }, - "OnDiskBlobDeposit": { - "1": "da3abda453def0d7c70c8a5dfcc3c8d00dd6822f60ddc01be3bdead4b0b5b482" - }, - "SeaweedFSBlobDeposit": { - "1": "bcbec5dcdc06a0c87f89a10a6a8809706f24cedd97b5f850f8b48840a1f41941" - }, - "NumpyArrayObject": { - "1": "d47a376401d92d47e5748e34f98ee270f8ebfd52cffbe6271b5faa8193e728c5" - }, - "NumpyScalarObject": { - "1": "952bebb4dd3e3641c33b4ebcf2c051dbdebae5f1bf3b7b63ea89423360705411" - }, - "NumpyBoolObject": { - "1": "b7a231baaa4b1f519d70c5afb15b4a9b7232f1128f7fd3709c1ea8b7345f8c6c" - }, - "PandasDataframeObject": { - "1": "ff9d6c1884413f712d95d29190e30938b33de19e11dff9f88d9b89c51499cac5" - }, - "PandasSeriesObject": { - "1": "69eadfe14e5a7035767d2538e2db8775da6569cf5127f58d13315c4b85e5603d" - }, - "ReplyNotification": { - "1": "ce1e2a6b0d618478d3b1b992e4c8605817919c88a4884ca0540e0886ecdb8215" - }, - "Notification": { - "1": "1e5a65d91e27bf53d5b2ed0b45d9cee0cf77104b7111f99223194ceb0d0137fe" - }, - "CreateNotification": { - "1": "6858b743ac07d853a0302dc64de0e7d852135e2564ebad325d5ff35d17c29f6f" - }, - "Change": { - "1": 
"2c470ff8aa076b88105640ce79d361a9b439927e501c238fa33ac7c1c45aa2c0" - }, - "ChangeStatus": { - "1": "7571229e92d8d52a0e90fa8856169b41045b42b50568b266823bdcea838dfb39" - }, - "ActionStoreChange": { - "1": "cf527995930cce09d90806713d30301493240079319bcc85e894428aee46017e" - }, - "Request": { - "1": "340f4ac61ccbf6f566666327d6bca043dcd643e6f8e24897ef10bd6312e74995" - }, - "RequestInfo": { - "1": "d571708de3c187ca5840c64784d99f7bfce8f33aa2ba48f9d56b824564551654" - }, - "RequestInfoFilter": { - "1": "c336af8d474071eb61e5f467513753e64d4e153e12892f9c6875b235057b0f0a" - }, - "SubmitRequest": { - "1": "1870ce541169eab04cb69d3ae88ea30dc2fcdd997b620567ca9d87936d9600cf" - }, - "ObjectMutation": { - "1": "275d9cf180904d1e34e1f3d7e838105e843127faf5a64029a1cf85d00234b8c9" - }, - "EnumMutation": { - "1": "3a1d1b47e0cdb5094298bce58bc9b76ecb66064459504f910c7d755eb1d5e276" - }, - "UserCodeStatusChange": { - "1": "928dd4ceeb4858b18b806ca62b49a840f54269f7866744c9aa6edb0af9d7dfc1" - }, - "ProjectEvent": { - "1": "97db1898509ff160c9defae6d02c7aa6e9623b7fbdf517263d9dc016881c5e01" - }, - "ProjectEventAddObject": { - "1": "c2241a4a66d04ed59334afde1f1fd2ce1384cb9b5862ad16eacb11ade868ea2f" - }, - "ProjectEventAddLink": { - "1": "47e304a66bfa3ff89be5ea531de43ff72e8628f8feaca9c8934edad4a118b5f0" - }, - "ProjectSubEvent": { - "1": "6dc448efa26bec63eb5e8127b5d541e40136a1a382e16e91db1d67b25bfe1213" - }, - "ProjectThreadMessage": { - "1": "9f8b11d603caae6d0e0f28957949dfc57c26fec9685f2c80501330b1d6bae665" - }, - "ProjectMessage": { - "1": "d678beafc33f7e7df7e771a82d5cba6d5a36728a033d3976b6e5998726733d27" - }, - "ProjectRequestResponse": { - "1": "51b5a5d8cf0bde45abd2bd3a4411f93769fa542666a137ce9611d38fb48ffb4c" - }, - "ProjectRequest": { - "1": "9eff1b3cc74c9706722363abb4062fc77c0a4f093d448b795ad662861649f111" - }, - "AnswerProjectPoll": { - "1": "f538a4fcae286cbc9755f51e2f2ce8809d66ce5d66f50173ef1824f89ce9b51d" - }, - "ProjectPoll": { - "1": 
"b456a699a249fd3fffe9739cdd9ec3ee8c05e59b2d9872ad9864167d78088091" - }, - "Project": { - "1": "bf59890e92d95b362cc7ef9c3d7fa6a1815978e02111a30cbcb047239e57d61e" - }, - "ProjectSubmit": { - "1": "5084844056ddefcea7fc634dd9945c03ef6d030bcd8f63aa07fe11fea0a5389f" - }, - "QueueItem": { - "1": "1d53446d5cd788120b15ea6b108a4a7abd480377370be7128f44297f8fb00b76" - }, - "ZMQClientConfig": { - "1": "e3153f18c9fd04cf07b844153d093c8a090baac4c99d71ecd6491961e7f1dafb" - }, - "Plan": { - "1": "41713fc89a2cab7db592df6cd1c45e1309f86a50a8f531ddaf4052947186b0e0" - } - }, - "hash": "845f621ac334abbecb6ed7adcefa03faccaa9ee9dd722a1780e3bde9535093da", - "supported": true - } -} diff --git a/packages/syft/src/syft/protocol/protocol_version.json b/packages/syft/src/syft/protocol/protocol_version.json new file mode 100644 index 00000000000..a69ad8c0467 --- /dev/null +++ b/packages/syft/src/syft/protocol/protocol_version.json @@ -0,0 +1,774 @@ +{ + "dev": { + "object_versions": { + "PartialSyftObject": { + "1": { + "version": 1, + "hash": "fa2770d76f3dd904e2c1c4a78c9ba10e5ac33b4dd7a4d61faa45a22078e94aa8", + "action": "add" + } + }, + "NodeMetadataUpdate": { + "1": { + "version": 1, + "hash": "b73adf379a179f6ef9a6d5ee9477e69697a47ad25579bab46cc1fc44ec35ba04", + "action": "add" + } + }, + "NodeMetadata": { + "1": { + "version": 1, + "hash": "015a04cbfa19adbad7f606e0c419a1dc615a27eff7b76c426853ea3ca4eda860", + "action": "add" + }, + "2": { + "version": 2, + "hash": "3986c7d8b3cf9355a1fbdd99dfe3d872fc464420b91a668ea3288ee4481bab6d", + "action": "add" + } + }, + "LinkedObject": { + "1": { + "version": 1, + "hash": "8a117f8bf0282b8cf525de893404dbd88cc460a5a823d83006c2e82d88846f8d", + "action": "add" + } + }, + "BaseConfig": { + "1": { + "version": 1, + "hash": "f35db90c1c3bdd993b7b1e4f28327a8f6b1d0e43115b044167ca28d5c740178a", + "action": "add" + } + }, + "ServiceConfig": { + "1": { + "version": 1, + "hash": "be54130ee8e6502d91e07b14e96ff0dd779afc36617808067aae3a1a2055b6ea", + "action": "add" + } 
+ }, + "LibConfig": { + "1": { + "version": 1, + "hash": "1a18afad23100c2979e9e2c4e6f046fd65dc2dead405feb57aea4c852b98df01", + "action": "add" + } + }, + "APIEndpoint": { + "1": { + "version": 1, + "hash": "c88e4405839e87fdfe90f86877ef2addd7be7281d36b7891636129fc8b3c1e8c", + "action": "add" + } + }, + "LibEndpoint": { + "1": { + "version": 1, + "hash": "214b81bda5fd00e7734227a4c75981d807f3741039be5b4fb007076cf2e638cc", + "action": "add" + } + }, + "SignedSyftAPICall": { + "1": { + "version": 1, + "hash": "2be9b74663354b4edeef3bc75dc67dc35bf24890c8a86a53a97957d470af06b2", + "action": "add" + } + }, + "SyftAPICall": { + "1": { + "version": 1, + "hash": "fa9520d29d9df56fb9d5d2080aecfc3be14c49e7267a3e5b9fd05ad1b0828e11", + "action": "add" + } + }, + "SyftAPIData": { + "1": { + "version": 1, + "hash": "16933cd10a2c45ad44826480e88e06ffbb7a7762c058c7e84da94ddc70478e7c", + "action": "add" + } + }, + "SyftAPI": { + "1": { + "version": 1, + "hash": "142a9bb5a4a63d5d072fbfbdbb48ec9535f1ae51e40f9d4f59760fb807c9a189", + "action": "add" + } + }, + "User": { + "1": { + "version": 1, + "hash": "21cb3659dc4ddd0b4d58c677f46feecac9f682ea36660f42845067b29b5ad8e7", + "action": "add" + } + }, + "UserUpdate": { + "1": { + "version": 1, + "hash": "f12c19dd38330f98fb2d9e0bf47e8bdcad3f6e1c085d2994d80224cf4b905984", + "action": "add" + } + }, + "UserCreate": { + "1": { + "version": 1, + "hash": "a9d7a52aaa7dcf622e317e899e1ded3023a94b86773ca16cd7d6a334fcffbe8b", + "action": "add" + } + }, + "UserSearch": { + "1": { + "version": 1, + "hash": "e697bf5b287cf29560c94c5851d8fb6ac74d2ce5c6200539a11a257bc150c75b", + "action": "add" + } + }, + "UserView": { + "1": { + "version": 1, + "hash": "fd624963af09c0e3471dfc49b2f09fafdd7521c8af804198015cc5455d7b56bc", + "action": "add" + } + }, + "UserViewPage": { + "1": { + "version": 1, + "hash": "c856f13ddc9a405b6d52482a6a273ffb038f988d589e7bb5cd68e0c8dd8668de", + "action": "add" + } + }, + "UserPrivateKey": { + "1": { + "version": 1, + "hash": 
"e06f237cdfd516caff0766b5f8ba4e4b2b85824c76910f1374ffce72173f8269", + "action": "add" + } + }, + "StoreConfig": { + "1": { + "version": 1, + "hash": "5cba60386142434e1d51e8dcd2fe9e0b58c6de994ccede76a527b0c893a23a97", + "action": "add" + } + }, + "NodeSettingsUpdate": { + "1": { + "version": 1, + "hash": "ae77fb9f24004635a29abd979f5efa5a75780efe4fec1773cc533ac04aa57482", + "action": "add" + } + }, + "NodeSettings": { + "1": { + "version": 1, + "hash": "2735f660f23bfda8ffdf97b8ba3ab1bcdba01461b245832e9a9cb2a661ebcb74", + "action": "add" + } + }, + "HTTPConnection": { + "1": { + "version": 1, + "hash": "0a6e181e67978ce45a2414be4c4c39272ca6ed38a5fe9e9877619c13dc6aafef", + "action": "add" + } + }, + "PythonConnection": { + "1": { + "version": 1, + "hash": "6ab9e80d2208ce44fb6e5db8427234680f56b1ef8092be701d6d833b6e213926", + "action": "add" + } + }, + "DateTime": { + "1": { + "version": 1, + "hash": "34f9942a3f75988a1de6e46c40697698f2505336cf74282f683cfd3a7d6d4ec1", + "action": "add" + } + }, + "BlobFile": { + "1": { + "version": 1, + "hash": "229121eb07430f72c66281764a690270ff821a6551c036528b9e749b343bedc1", + "action": "add" + } + }, + "SecureFilePathLocation": { + "1": { + "version": 1, + "hash": "d3e6b95de5da0861922c302e9dabf443ee337b21da695d69c85bdb1e6f0ec45b", + "action": "add" + } + }, + "SeaweedSecureFilePathLocation": { + "1": { + "version": 1, + "hash": "0d5c382191c63e68b90237bb4e882abea6311ff1ba645adc784ee272de5f4623", + "action": "add" + } + }, + "BlobStorageEntry": { + "1": { + "version": 1, + "hash": "e010b50076f73fb934029a583310d13c3ec7abaa93520090fae3fb16457868fc", + "action": "add" + } + }, + "BlobStorageMetadata": { + "1": { + "version": 1, + "hash": "f1d0b4085276ba5f15a8cd81553374d465317d96e0a1c427e33e2d866e362d22", + "action": "add" + } + }, + "CreateBlobStorageEntry": { + "1": { + "version": 1, + "hash": "183fd3ed16b0687f01979b3b76bbb17a9b029bd39f11d46faf54b3e5205e9e2d", + "action": "add" + } + }, + "BlobRetrieval": { + "1": { + "version": 1, + 
"hash": "c55f486ea79e9e96c047b32987e555357248cd30f18e1a8244fefe73457e5b9e", + "action": "add" + } + }, + "SyftObjectRetrieval": { + "1": { + "version": 1, + "hash": "da96484c8e57fc060c4dba29ef4e375284720dd05f1ed2ee60e1df52450437cd", + "action": "add" + } + }, + "BlobRetrievalByURL": { + "1": { + "version": 1, + "hash": "656a44e91ce560056679b459c9fd33b55a5c2b0754e455099a456074e2e14822", + "action": "add" + } + }, + "BlobDeposit": { + "1": { + "version": 1, + "hash": "23a73cc9bff8e6833681e55d872121b6d54520d76f9426fd199a19eb847deea4", + "action": "add" + } + }, + "WorkerSettings": { + "1": { + "version": 1, + "hash": "2fe75dd39cb6367bd9cea2c7f59e40a85bbbcfc44f518572f377ef25c3acd205", + "action": "add" + } + }, + "HTTPNodeRoute": { + "1": { + "version": 1, + "hash": "b4662c11f7487ab907caf3cadf8c33eca2e0fbd640ed1fba872c3f982b749986", + "action": "add" + } + }, + "PythonNodeRoute": { + "1": { + "version": 1, + "hash": "d8f268996e5443a248cc2eb5f4a568591e5f9e18952f8519f3f995e1f1f486e4", + "action": "add" + } + }, + "EnclaveMetadata": { + "1": { + "version": 1, + "hash": "6ad19306231ebbb4d8b0c4e4cc82b881298835862a2c48f2358db47215a020ac", + "action": "add" + } + }, + "DataSubject": { + "1": { + "version": 1, + "hash": "287ab306e1c4ebe0c883600ffd52dc734d08f0217b2a961afbdc6c7883bf4ccd", + "action": "add" + } + }, + "DataSubjectCreate": { + "1": { + "version": 1, + "hash": "8b3487af42ba249d7cf705c7f66a09dd832c055814091def34a38133f8148158", + "action": "add" + } + }, + "DataSubjectMemberRelationship": { + "1": { + "version": 1, + "hash": "6aed0e1548d6b09bfac132040f7315f49c13855a6bc147a4f1aa4ce09572b952", + "action": "add" + } + }, + "Contributor": { + "1": { + "version": 1, + "hash": "3e27f1ea23cecfc3e0956743ae832f0f70ecd144f9df3f128b123e9347944afa", + "action": "add" + } + }, + "MarkdownDescription": { + "1": { + "version": 1, + "hash": "506d47fa85728ad444f2fa657e39b341bc759d21a80325831b1e84926ee748f1", + "action": "add" + } + }, + "Asset": { + "1": { + "version": 1, + "hash": 
"f8370e8dd87df6a05bde1226c52c3ce6e7de636e6032341b977efe5e601a731d", + "action": "add" + } + }, + "CreateAsset": { + "1": { + "version": 1, + "hash": "c3a82856694adcb3c916a961776c2fa3bc90a7ccb50b8b9d42776810266ed241", + "action": "add" + } + }, + "Dataset": { + "1": { + "version": 1, + "hash": "d7a45bf9128472715e63a67192d4181672eadd8b5873d2ba96c2df3b2be749b9", + "action": "add" + } + }, + "DatasetPageView": { + "1": { + "version": 1, + "hash": "c7da1fac28f70c19d199f123b04fbd4a9c7681e3846dee0de70ea926a9440a2e", + "action": "add" + } + }, + "CreateDataset": { + "1": { + "version": 1, + "hash": "6a31617de99738bc176f19397884581051443c7c7ba952c983929fed688a9d7e", + "action": "add" + } + }, + "ActionDataEmpty": { + "1": { + "version": 1, + "hash": "fc83d91ac6ba78c749a4e7e16c1aa117baaae62b5f33156ded4e5a79b9532a98", + "action": "add" + } + }, + "ActionFileData": { + "1": { + "version": 1, + "hash": "47a0a5f9fb48de09885d4f9a6c5a5a05f4dd04575ea42df7dea0cab02817063f", + "action": "add" + } + }, + "Action": { + "1": { + "version": 1, + "hash": "204b3c97b41f208ecb17c541af55e171675faaefa10c103b405a4077b0226a7e", + "action": "add" + } + }, + "AnyActionObject": { + "1": { + "version": 1, + "hash": "f11bd6135ba46247c06bfa8a3c6b7f2e540a1033afe0512c3359e31eb3d59388", + "action": "add" + } + }, + "TwinObject": { + "1": { + "version": 1, + "hash": "8f6abd756d41f9639865c6fd55e6cc2ec6b89fd18bac3c77bf3a0502d81ca8ec", + "action": "add" + } + }, + "ExactMatch": { + "1": { + "version": 1, + "hash": "f71a495d2452190596fe435eaf59b07ba390d7895f6c808a2f87a1719227ba9c", + "action": "add" + } + }, + "OutputHistory": { + "1": { + "version": 1, + "hash": "24f282dd181ecc7a05c93c02dff84dff45aa52928e2331a15ef667645e9bcf0b", + "action": "add" + } + }, + "OutputPolicyExecuteCount": { + "1": { + "version": 1, + "hash": "95d198d2021dbaf9965df39a4571ad06e787684ff79bd6e8a720c47825eebd7e", + "action": "add" + } + }, + "OutputPolicyExecuteOnce": { + "1": { + "version": 1, + "hash": 
"b8bc1fea2e9b51b5dfc3cbd4b0a131cb2a5b1fe746b5e329395bf319b38bf9b2", + "action": "add" + } + }, + "UserPolicy": { + "1": { + "version": 1, + "hash": "ec3f8ea84e2b05ce56db8f35cff14f78569e921f566477581b3299eb6a9fa053", + "action": "add" + } + }, + "SubmitUserPolicy": { + "1": { + "version": 1, + "hash": "3147a4e45270367a40ca8c4b7b502c8683200d123828720365521b90b2775794", + "action": "add" + } + }, + "UserCode": { + "1": { + "version": 1, + "hash": "ad509fccb7bb5c00971453c1f1235785f40d5d5b3eee3df1dc9edafc758c7193", + "action": "add" + } + }, + "SubmitUserCode": { + "1": { + "version": 1, + "hash": "b4a919054beb2488c7b4839d60c494400d791112adf009631dce13787cd58e78", + "action": "add" + } + }, + "UserCodeExecutionResult": { + "1": { + "version": 1, + "hash": "9aab24def6616ac908ef1100b654b4dbeca1ea4cfff229c53d6b14491c795555", + "action": "add" + } + }, + "CodeHistory": { + "1": { + "version": 1, + "hash": "bbbd59801717a98691034a37c9de388c7a65db2721bd0c5c9ff0dbe8fc70be67", + "action": "add" + } + }, + "CodeHistoryView": { + "1": { + "version": 1, + "hash": "142e78415da10dae739e923d39ce511496a3c7b31e8c4553a6cbb1907c126a3a", + "action": "add" + } + }, + "CodeHistoriesDict": { + "1": { + "version": 1, + "hash": "453af101a1de8e37a0bfacf22077c35994718791f295956f1f06727f8d9b7fe8", + "action": "add" + } + }, + "UsersCodeHistoriesDict": { + "1": { + "version": 1, + "hash": "cf8ef92a08cabb068e4616c1700451b75ba4d511343830e3e56b65882fb784aa", + "action": "add" + } + }, + "NodePeer": { + "1": { + "version": 1, + "hash": "8920d9e456fd1a13f46c0528a8fe3fec8af46440b3eb89e8d7d48ad64babee1e", + "action": "add" + } + }, + "CommandReport": { + "1": { + "version": 1, + "hash": "a81fe3d0cc5796f45e925d09b6e8132b79fe5df0f341d55b3870c109f8c1e19d", + "action": "add" + } + }, + "CommandResult": { + "1": { + "version": 1, + "hash": "14b48d4a1cbc5f5ae1e5e74834e7f1002adae7b2766000ea822f180fd7cd39db", + "action": "add" + } + }, + "VPNClientConnection": { + "1": { + "version": 1, + "hash": 
"7d44711978f930d42c06d04483abcdb1f230782a8f16f4feb5efb7b2b2093bb2", + "action": "add" + } + }, + "HeadscaleAuthToken": { + "1": { + "version": 1, + "hash": "0b363503b6c611b44e33561a2a6db3f260cfd4bbc5f4245deac5052fd5149803", + "action": "add" + } + }, + "TailscalePeer": { + "1": { + "version": 1, + "hash": "8ff85aa2b913a6bb206b9de0495d9f74a17f55823891da98cb6fdbe78f46a44b", + "action": "add" + } + }, + "TailscaleStatus": { + "1": { + "version": 1, + "hash": "ed262f4b9a569d9933f4a86cd2caa2ce213fc7a2319a1371f6a3cf3ccf884c8a", + "action": "add" + } + }, + "OnDiskBlobDeposit": { + "1": { + "version": 1, + "hash": "da3abda453def0d7c70c8a5dfcc3c8d00dd6822f60ddc01be3bdead4b0b5b482", + "action": "add" + } + }, + "SeaweedFSBlobDeposit": { + "1": { + "version": 1, + "hash": "bcbec5dcdc06a0c87f89a10a6a8809706f24cedd97b5f850f8b48840a1f41941", + "action": "add" + } + }, + "DictStoreConfig": { + "1": { + "version": 1, + "hash": "b925934f43b72ddf87c420667e16b7d8351f4294997a4a01a43c5958e5e7b465", + "action": "add" + } + }, + "NumpyArrayObject": { + "1": { + "version": 1, + "hash": "d47a376401d92d47e5748e34f98ee270f8ebfd52cffbe6271b5faa8193e728c5", + "action": "add" + } + }, + "NumpyScalarObject": { + "1": { + "version": 1, + "hash": "952bebb4dd3e3641c33b4ebcf2c051dbdebae5f1bf3b7b63ea89423360705411", + "action": "add" + } + }, + "NumpyBoolObject": { + "1": { + "version": 1, + "hash": "b7a231baaa4b1f519d70c5afb15b4a9b7232f1128f7fd3709c1ea8b7345f8c6c", + "action": "add" + } + }, + "PandasDataframeObject": { + "1": { + "version": 1, + "hash": "ff9d6c1884413f712d95d29190e30938b33de19e11dff9f88d9b89c51499cac5", + "action": "add" + } + }, + "PandasSeriesObject": { + "1": { + "version": 1, + "hash": "69eadfe14e5a7035767d2538e2db8775da6569cf5127f58d13315c4b85e5603d", + "action": "add" + } + }, + "ReplyNotification": { + "1": { + "version": 1, + "hash": "ce1e2a6b0d618478d3b1b992e4c8605817919c88a4884ca0540e0886ecdb8215", + "action": "add" + } + }, + "Notification": { + "1": { + "version": 1, + 
"hash": "1e5a65d91e27bf53d5b2ed0b45d9cee0cf77104b7111f99223194ceb0d0137fe", + "action": "add" + } + }, + "CreateNotification": { + "1": { + "version": 1, + "hash": "6858b743ac07d853a0302dc64de0e7d852135e2564ebad325d5ff35d17c29f6f", + "action": "add" + } + }, + "Change": { + "1": { + "version": 1, + "hash": "2c470ff8aa076b88105640ce79d361a9b439927e501c238fa33ac7c1c45aa2c0", + "action": "add" + } + }, + "ChangeStatus": { + "1": { + "version": 1, + "hash": "7571229e92d8d52a0e90fa8856169b41045b42b50568b266823bdcea838dfb39", + "action": "add" + } + }, + "ActionStoreChange": { + "1": { + "version": 1, + "hash": "cf527995930cce09d90806713d30301493240079319bcc85e894428aee46017e", + "action": "add" + } + }, + "Request": { + "1": { + "version": 1, + "hash": "340f4ac61ccbf6f566666327d6bca043dcd643e6f8e24897ef10bd6312e74995", + "action": "add" + } + }, + "RequestInfo": { + "1": { + "version": 1, + "hash": "d571708de3c187ca5840c64784d99f7bfce8f33aa2ba48f9d56b824564551654", + "action": "add" + } + }, + "RequestInfoFilter": { + "1": { + "version": 1, + "hash": "c336af8d474071eb61e5f467513753e64d4e153e12892f9c6875b235057b0f0a", + "action": "add" + } + }, + "SubmitRequest": { + "1": { + "version": 1, + "hash": "1870ce541169eab04cb69d3ae88ea30dc2fcdd997b620567ca9d87936d9600cf", + "action": "add" + } + }, + "ObjectMutation": { + "1": { + "version": 1, + "hash": "275d9cf180904d1e34e1f3d7e838105e843127faf5a64029a1cf85d00234b8c9", + "action": "add" + } + }, + "EnumMutation": { + "1": { + "version": 1, + "hash": "3a1d1b47e0cdb5094298bce58bc9b76ecb66064459504f910c7d755eb1d5e276", + "action": "add" + } + }, + "UserCodeStatusChange": { + "1": { + "version": 1, + "hash": "928dd4ceeb4858b18b806ca62b49a840f54269f7866744c9aa6edb0af9d7dfc1", + "action": "add" + } + }, + "SyftObjectMigrationState": { + "1": { + "version": 1, + "hash": "194fd4dc57764d454ac763d256e3bfcd2b0040a134daf9ee0d8e5ac7ab21abbc", + "action": "add" + } + }, + "ProjectThreadMessage": { + "1": { + "version": 1, + "hash": 
"9f8b11d603caae6d0e0f28957949dfc57c26fec9685f2c80501330b1d6bae665", + "action": "add" + } + }, + "ProjectMessage": { + "1": { + "version": 1, + "hash": "d678beafc33f7e7df7e771a82d5cba6d5a36728a033d3976b6e5998726733d27", + "action": "add" + } + }, + "ProjectRequestResponse": { + "1": { + "version": 1, + "hash": "51b5a5d8cf0bde45abd2bd3a4411f93769fa542666a137ce9611d38fb48ffb4c", + "action": "add" + } + }, + "ProjectRequest": { + "1": { + "version": 1, + "hash": "9eff1b3cc74c9706722363abb4062fc77c0a4f093d448b795ad662861649f111", + "action": "add" + } + }, + "AnswerProjectPoll": { + "1": { + "version": 1, + "hash": "f538a4fcae286cbc9755f51e2f2ce8809d66ce5d66f50173ef1824f89ce9b51d", + "action": "add" + } + }, + "ProjectPoll": { + "1": { + "version": 1, + "hash": "b456a699a249fd3fffe9739cdd9ec3ee8c05e59b2d9872ad9864167d78088091", + "action": "add" + } + }, + "Project": { + "1": { + "version": 1, + "hash": "bf59890e92d95b362cc7ef9c3d7fa6a1815978e02111a30cbcb047239e57d61e", + "action": "add" + } + }, + "ProjectSubmit": { + "1": { + "version": 1, + "hash": "5084844056ddefcea7fc634dd9945c03ef6d030bcd8f63aa07fe11fea0a5389f", + "action": "add" + } + }, + "QueueItem": { + "1": { + "version": 1, + "hash": "1d53446d5cd788120b15ea6b108a4a7abd480377370be7128f44297f8fb00b76", + "action": "add" + } + }, + "ZMQClientConfig": { + "1": { + "version": 1, + "hash": "e3153f18c9fd04cf07b844153d093c8a090baac4c99d71ecd6491961e7f1dafb", + "action": "add" + } + }, + "SQLiteStoreConfig": { + "1": { + "version": 1, + "hash": "f4497d4a972814e3c27d610487e7ab578d8353dd1635f11aab4d5cbe31f721a8", + "action": "add" + } + }, + "Plan": { + "1": { + "version": 1, + "hash": "41713fc89a2cab7db592df6cd1c45e1309f86a50a8f531ddaf4052947186b0e0", + "action": "add" + } + } + } + } +} diff --git a/packages/syft/src/syft/service/action/action_object.py b/packages/syft/src/syft/service/action/action_object.py index a49bcedd780..9aaa5f56a58 100644 --- a/packages/syft/src/syft/service/action/action_object.py +++ 
b/packages/syft/src/syft/service/action/action_object.py @@ -222,6 +222,9 @@ class ActionObjectPointer: class PreHookContext(SyftBaseObject): + __canonical_name__ = "PreHookContext" + __version__ = SYFT_OBJECT_VERSION_1 + """Hook context Parameters: diff --git a/packages/syft/src/syft/service/context.py b/packages/syft/src/syft/service/context.py index 5f8613ad3d0..affbc41529f 100644 --- a/packages/syft/src/syft/service/context.py +++ b/packages/syft/src/syft/service/context.py @@ -57,6 +57,9 @@ class UnauthedServiceContext(NodeServiceContext): class ChangeContext(SyftBaseObject): + __canonical_name__ = "ChangeContext" + __version__ = SYFT_OBJECT_VERSION_1 + node: Optional[AbstractNode] = None approving_user_credentials: Optional[SyftVerifyKey] requesting_user_credentials: Optional[SyftVerifyKey] diff --git a/packages/syft/src/syft/service/metadata/node_metadata.py b/packages/syft/src/syft/service/metadata/node_metadata.py index 016854f1a20..91dfa30131a 100644 --- a/packages/syft/src/syft/service/metadata/node_metadata.py +++ b/packages/syft/src/syft/service/metadata/node_metadata.py @@ -23,6 +23,7 @@ from ...types.syft_object import SyftObject from ...types.transforms import convert_types from ...types.transforms import drop +from ...types.transforms import rename from ...types.transforms import transform from ...types.uid import UID @@ -155,6 +156,15 @@ def check_version(self, client_version: str) -> bool: ) +@transform(NodeMetadataV2, NodeMetadataJSON) +def metadata_to_json() -> List[Callable]: + return [ + drop(["__canonical_name__"]), + rename("__version__", "metadata_version"), + convert_types(["id", "verify_key", "node_type"], str), + ] + + @transform(NodeMetadataJSON, NodeMetadataV2) def json_to_metadata() -> List[Callable]: return [ diff --git a/packages/syft/src/syft/service/service.py b/packages/syft/src/syft/service/service.py index 1011cddc1d8..52fb081b541 100644 --- a/packages/syft/src/syft/service/service.py +++ 
b/packages/syft/src/syft/service/service.py @@ -33,6 +33,7 @@ from ..serde.signature import signature_remove_context from ..serde.signature import signature_remove_self from ..store.linked_obj import LinkedObject +from ..types.syft_object import SYFT_OBJECT_VERSION_1 from ..types.syft_object import SyftBaseObject from ..types.syft_object import SyftObject from ..types.syft_object import attach_attribute_to_syft_object @@ -74,6 +75,9 @@ def resolve_link( @serializable() class BaseConfig(SyftBaseObject): + __canonical_name__ = "BaseConfig" + __version__ = SYFT_OBJECT_VERSION_1 + public_path: str private_path: str public_name: str @@ -86,6 +90,7 @@ class BaseConfig(SyftBaseObject): @serializable() class ServiceConfig(BaseConfig): + __canonical_name__ = "ServiceConfig" permissions: List roles: List[ServiceRole] @@ -95,6 +100,7 @@ def has_permission(self, user_service_role: ServiceRole): @serializable() class LibConfig(BaseConfig): + __canonical_name__ = "LibConfig" permissions: Set[CMPPermission] def has_permission(self, credentials: SyftVerifyKey): diff --git a/packages/syft/src/syft/store/dict_document_store.py b/packages/syft/src/syft/store/dict_document_store.py index b6af5e4bc0e..516a2fc85c5 100644 --- a/packages/syft/src/syft/store/dict_document_store.py +++ b/packages/syft/src/syft/store/dict_document_store.py @@ -78,6 +78,7 @@ def reset(self): @serializable() class DictStoreConfig(StoreConfig): + __canonical_name__ = "DictStoreConfig" """Dictionary-based configuration Parameters: diff --git a/packages/syft/src/syft/store/mongo_document_store.py b/packages/syft/src/syft/store/mongo_document_store.py index 05b5a94d16e..b41f60ff17b 100644 --- a/packages/syft/src/syft/store/mongo_document_store.py +++ b/packages/syft/src/syft/store/mongo_document_store.py @@ -26,6 +26,7 @@ from ..service.action.action_permissions import ActionObjectWRITE from ..service.action.action_permissions import ActionPermission from ..service.response import SyftSuccess +from 
..types.syft_object import SYFT_OBJECT_VERSION_1 from ..types.syft_object import StorableObjectType from ..types.syft_object import SyftBaseObject from ..types.syft_object import SyftObject @@ -47,6 +48,9 @@ @serializable() class MongoDict(SyftBaseObject): + __canonical_name__ = "MongoDict" + __version__ = SYFT_OBJECT_VERSION_1 + keys: List[Any] values: List[Any] diff --git a/packages/syft/src/syft/store/sqlite_document_store.py b/packages/syft/src/syft/store/sqlite_document_store.py index 27195ae7ac5..856f604da09 100644 --- a/packages/syft/src/syft/store/sqlite_document_store.py +++ b/packages/syft/src/syft/store/sqlite_document_store.py @@ -386,6 +386,7 @@ def file_path(self) -> Optional[Path]: @serializable() class SQLiteStoreConfig(StoreConfig): + __canonical_name__ = "SQLiteStoreConfig" """SQLite Store config, used by SQLiteStorePartition Parameters: diff --git a/packages/syft/src/syft/types/syft_object.py b/packages/syft/src/syft/types/syft_object.py index c4146c610bd..0017832be15 100644 --- a/packages/syft/src/syft/types/syft_object.py +++ b/packages/syft/src/syft/types/syft_object.py @@ -50,11 +50,6 @@ SYFT_OBJECT_VERSION_1 = 1 SYFT_OBJECT_VERSION_2 = 2 -supported_object_versions = [SYFT_OBJECT_VERSION_1, SYFT_OBJECT_VERSION_2] - -HIGHEST_SYFT_OBJECT_VERSION = max(supported_object_versions) -LOWEST_SYFT_OBJECT_VERSION = min(supported_object_versions) - # These attributes are dynamically added based on node/client # that is interaction with the SyftObject @@ -95,6 +90,9 @@ def _set_obj_location_(self, node_uid, credentials): class Context(SyftBaseObject): + __canonical_name__ = "Context" + __version__ = SYFT_OBJECT_VERSION_1 + pass diff --git a/packages/syft/tests/syft/hash_test.py b/packages/syft/tests/syft/hash_test.py index 9032a64a3aa..68655836437 100644 --- a/packages/syft/tests/syft/hash_test.py +++ b/packages/syft/tests/syft/hash_test.py @@ -4,6 +4,7 @@ # syft absolute from syft.serde.serializable import serializable +from syft.types.syft_object 
import SYFT_OBJECT_VERSION_1 from syft.types.syft_object import SyftBaseObject from syft.types.syft_object import SyftHashableObject @@ -25,6 +26,9 @@ def __init__(self, key, value, flag=None): @serializable(attrs=["id", "data"]) class MockWrapper(SyftBaseObject, SyftHashableObject): + __canonical_name__ = "MockWrapper" + __version__ = SYFT_OBJECT_VERSION_1 + id: str data: Optional[MockObject] diff --git a/packages/syft/tests/syft/settings/fixtures.py b/packages/syft/tests/syft/settings/fixtures.py index 1a4c6ac0faf..ab8ee071f59 100644 --- a/packages/syft/tests/syft/settings/fixtures.py +++ b/packages/syft/tests/syft/settings/fixtures.py @@ -12,8 +12,7 @@ from syft.service.settings.settings import NodeSettingsUpdate from syft.service.settings.settings_service import SettingsService from syft.service.settings.settings_stash import SettingsStash -from syft.types.syft_object import HIGHEST_SYFT_OBJECT_VERSION -from syft.types.syft_object import LOWEST_SYFT_OBJECT_VERSION +from syft.types.syft_object import SYFT_OBJECT_VERSION_1 @pytest.fixture @@ -52,8 +51,8 @@ def metadata_json(faker) -> NodeMetadataJSON: name=faker.name(), id=faker.text(), verify_key=faker.text(), - highest_object_version=HIGHEST_SYFT_OBJECT_VERSION, - lowest_object_version=LOWEST_SYFT_OBJECT_VERSION, + highest_object_version=SYFT_OBJECT_VERSION_1, + lowest_object_version=SYFT_OBJECT_VERSION_1, syft_version=__version__, signup_enabled=False, admin_email="info@openmined.org", From e55dbb1eafdb7a96490026602db87b61bfa143bf Mon Sep 17 00:00:00 2001 From: Madhava Jay Date: Mon, 2 Oct 2023 15:31:40 +1000 Subject: [PATCH 35/67] Refined DataProtocol methods for staging and bumping versions - Swapped NodeMetadataV2 to NodeMetadata --- packages/syft/src/syft/__init__.py | 4 +- packages/syft/src/syft/client/client.py | 4 +- packages/syft/src/syft/node/node.py | 8 +- .../syft/src/syft/protocol/data_protocol.py | 496 +++++------ .../src/syft/protocol/protocol_version.json | 775 +----------------- 
.../src/syft/service/metadata/migrations.py | 6 +- .../syft/service/metadata/node_metadata.py | 37 +- packages/syft/tests/syft/settings/fixtures.py | 6 +- .../syft/settings/settings_service_test.py | 6 +- 9 files changed, 239 insertions(+), 1103 deletions(-) diff --git a/packages/syft/src/syft/__init__.py b/packages/syft/src/syft/__init__.py index 72c46ee1d06..8acc731933a 100644 --- a/packages/syft/src/syft/__init__.py +++ b/packages/syft/src/syft/__init__.py @@ -32,7 +32,9 @@ from .node.server import serve_node # noqa: F401 from .node.server import serve_node as bind_worker # noqa: F401 from .node.worker import Worker # noqa: F401 -from .protocol.data_protocol import upgrade_protocol # noqa: F401 +from .protocol.data_protocol import bump_protocol_version # noqa: F401 +from .protocol.data_protocol import get_data_protocol # noqa: F401 +from .protocol.data_protocol import stage_protocol_changes # noqa: F401 from .serde import NOTHING # noqa: F401 from .serde.deserialize import _deserialize as deserialize # noqa: F401 from .serde.serializable import serializable # noqa: F401 diff --git a/packages/syft/src/syft/client/client.py b/packages/syft/src/syft/client/client.py index 7dd75b5a056..1ad7ba8937b 100644 --- a/packages/syft/src/syft/client/client.py +++ b/packages/syft/src/syft/client/client.py @@ -476,11 +476,11 @@ def post_init(self) -> None: if self.metadata is None: self._fetch_node_metadata(self.credentials) - self.communication_protocol = self.__get_communication_protocol( + self.communication_protocol = self._get_communication_protocol( self.metadata.supported_protocols ) - def __get_communication_protocol(self, protocols_supported_by_server: List) -> int: + def _get_communication_protocol(self, protocols_supported_by_server: List) -> int: data_protocol: DataProtocol = get_data_protocol() protocols_supported_by_client: List[int] = data_protocol.supported_protocols self.current_protocol = data_protocol.latest_version diff --git 
a/packages/syft/src/syft/node/node.py b/packages/syft/src/syft/node/node.py index 9c8b5c5452e..4e1f5025cfc 100644 --- a/packages/syft/src/syft/node/node.py +++ b/packages/syft/src/syft/node/node.py @@ -60,7 +60,7 @@ from ..service.dataset.dataset_service import DatasetService from ..service.enclave.enclave_service import EnclaveService from ..service.metadata.metadata_service import MetadataService -from ..service.metadata.node_metadata import NodeMetadataV2 +from ..service.metadata.node_metadata import NodeMetadata from ..service.network.network_service import NetworkService from ..service.notification.notification_service import NotificationService from ..service.object_search.migration_state_service import MigrateStateService @@ -452,7 +452,7 @@ def root_client(self): root_client.api.refresh_api_callback() return root_client - def __validate_data_migration_state(self): + def _validate_data_migration_state(self): partition_to_be_migrated = [] migration_state_service = self.get_service(MigrateStateService) for partition_settings in self.document_store.partitions.values(): @@ -646,7 +646,7 @@ def _get_service_method_from_path(self, path: str) -> Callable: return getattr(service_obj, method_name) @property - def metadata(self) -> NodeMetadataV2: + def metadata(self) -> NodeMetadata: name = "" deployed_on = "" organization = "" @@ -669,7 +669,7 @@ def metadata(self) -> NodeMetadataV2: admin_email = settings_data.admin_email show_warnings = settings_data.show_warnings - return NodeMetadataV2( + return NodeMetadata( name=name, id=self.id, verify_key=self.verify_key, diff --git a/packages/syft/src/syft/protocol/data_protocol.py b/packages/syft/src/syft/protocol/data_protocol.py index 137b1fb885f..80d28c6959c 100644 --- a/packages/syft/src/syft/protocol/data_protocol.py +++ b/packages/syft/src/syft/protocol/data_protocol.py @@ -9,6 +9,7 @@ from typing import Optional from typing import Tuple from typing import Type +from typing import Union # third party from result 
import Result @@ -20,6 +21,8 @@ from ..service.response import SyftSuccess from ..types.syft_object import SyftBaseObject +PROTOCOL_STATE_FILENAME = "protocol_version.json" + def natural_key(key: int | str) -> list[int]: """Define key for natural ordering of strings.""" @@ -33,44 +36,6 @@ def sort_dict_naturally(d: dict) -> dict: return {k: d[k] for k in sorted(d.keys(), key=natural_key)} -def protocol_state_builder(protocol_dict: dict, stop_key: Optional[str] = None) -> dict: - sorted_dict = sort_dict_naturally(protocol_dict) - state_dict = defaultdict(dict) - for k, _v in sorted_dict.items(): - # stop early - if stop_key == k: - return state_dict - object_versions = sorted_dict[k]["object_versions"] - for canonical_name, versions in object_versions.items(): - for version, object_metadata in versions.items(): - action = object_metadata["action"] - version = object_metadata["version"] - hash_str = object_metadata["hash"] - state_versions = state_dict[canonical_name] - if action == "add" and ( - str(version) in state_versions.keys() - or hash_str in state_versions.values() - ): - raise Exception( - f"Can't add {object_metadata} already in state {versions}" - ) - elif action == "remove" and ( - str(version) not in state_versions.keys() - or hash_str not in state_versions.values() - ): - raise Exception( - f"Can't remove {object_metadata} missing from state {versions}" - ) - if action == "add": - state_dict[canonical_name][str(version)] = hash_str - elif action == "remove": - del state_dict[canonical_name][str(version)] - return state_dict - - -PROTOCOL_STATE_FILENAME = "protocol_version.json" - - def data_protocol_file_name(): return PROTOCOL_STATE_FILENAME @@ -79,92 +44,24 @@ def data_protocol_dir(): return os.path.abspath(str(Path(__file__).parent)) -class InConsistentVersionException(Exception): - pass - - -def diff_state(state: dict) -> dict: - object_diff = defaultdict(dict) - compare_dict = defaultdict(dict) - for k in TYPE_BANK: - ( - nonrecursive, - 
serialize, - deserialize, - attribute_list, - exclude_attrs_list, - serde_overrides, - hash_exclude_attrs, - cls, - attribute_types, - version, - ) = TYPE_BANK[k] - if issubclass(cls, SyftBaseObject): - canonical_name = cls.__canonical_name__ - hash_str = DataProtocol._calculate_object_hash(cls) - - # build this up for later - compare_dict[canonical_name][version] = hash_str - - if canonical_name not in state: - # new object so its an add - object_diff[canonical_name][str(version)] = {} - object_diff[canonical_name][str(version)]["version"] = version - object_diff[canonical_name][str(version)]["hash"] = hash_str - object_diff[canonical_name][str(version)]["action"] = "add" - continue - - versions = state[canonical_name] - if str(version) in versions.keys() and versions[str(version)] == hash_str: - # already there so do nothing - continue - elif str(version) in versions.keys(): - raise Exception( - f"{canonical_name} {cls} version {version} hash has changed. " - + f"{hash_str} not in {versions.values()}. " - + "You probably need to bump the version number." 
- ) - else: - # new object so its an add - object_diff[canonical_name][str(version)] = {} - object_diff[canonical_name][str(version)]["version"] = version - object_diff[canonical_name][str(version)]["hash"] = hash_str - object_diff[canonical_name][str(version)]["action"] = "add" - continue - - # now check for remove actions - for canonical_name in state: - for version, hash_str in state[canonical_name].items(): - if canonical_name not in compare_dict: - # missing so its a remove - object_diff[canonical_name][str(version)] = {} - object_diff[canonical_name][str(version)]["version"] = version - object_diff[canonical_name][str(version)]["hash"] = hash_str - object_diff[canonical_name][str(version)]["action"] = "remove" - continue - versions = compare_dict[canonical_name] - if str(version) in versions.keys(): - # missing so its a remove - object_diff[canonical_name][str(version)] = {} - object_diff[canonical_name][str(version)]["version"] = version - object_diff[canonical_name][str(version)]["hash"] = hash_str - object_diff[canonical_name][str(version)]["action"] = "remove" - continue - return object_diff - - class DataProtocol: def __init__(self, filename: str) -> None: self.file_path = Path(data_protocol_dir()) / filename + self.load_state() + + def load_state(self) -> None: self.protocol_history = self.read_history() self.state = self.build_state() + self.diff, self.current = self.diff_state(self.state) + self.protocol_support = self.calculate_supported_protocols() @staticmethod def _calculate_object_hash(klass: Type[SyftBaseObject]) -> str: # TODO: this depends on what is marked as serde + field_name_keys = sorted(klass.__fields__.keys()) field_data = { - field_name: repr(model_field.annotation) - for field_name, model_field in klass.__fields__.items() + field_name: repr(klass.__fields__[field_name].annotation) + for field_name in field_name_keys } obj_meta_info = { "canonical_name": klass.__canonical_name__, @@ -175,215 +72,224 @@ def _calculate_object_hash(klass: 
Type[SyftBaseObject]) -> str: return hashlib.sha256(json.dumps(obj_meta_info).encode()).hexdigest() - # def calc_latest_object_versions(self): - # object_latest_version_map = {} - # migration_registry = SyftMigrationRegistry.__migration_version_registry__ - # for canonical_name in migration_registry: - # available_versions = migration_registry[canonical_name] - # version_obj_hash_map = {} - # for object_version, fqn in available_versions.items(): - # object_klass = index_syft_by_module_name(fqn) - # object_hash = self._calculate_object_hash(object_klass) - # version_obj_hash_map[object_version] = object_hash - # object_latest_version_map[canonical_name] = version_obj_hash_map - - # return object_latest_version_map - def read_history(self) -> Dict: return json.loads(self.file_path.read_text()) def save_history(self, history: dict) -> None: - self.file_path.write_text(json.dumps(history, indent=2)) - - # def find_deleted_versions( - # self, - # current_object_to_version_map: Dict, - # new_object_to_version_map: Dict, - # ): - # deleted_object_classes = set(current_object_to_version_map).difference( - # new_object_to_version_map.keys() - # ) - - # deleted_versions_map = {} - - # for canonical_name, new_versions in new_object_to_version_map.items(): - # current_versions = current_object_to_version_map.get( - # canonical_name, - # None, - # ) - # if current_versions is None: - # continue - - # deleted_versions = list(set(current_versions).difference(new_versions)) - # deleted_versions_map[canonical_name] = deleted_versions - - # return deleted_object_classes, deleted_versions_map - - # def recompute_supported_states( - # self, - # current_protocol_version: int, - # new_object_to_version_map: Dict, - # ): - # current_protocol_state = self.state[str(current_protocol_version)] - # deleted_object_classes, deleted_versions_map = self.find_deleted_versions( - # current_protocol_state, - # new_object_to_version_map=new_object_to_version_map, - # ) - - # for _, protocol_state 
in self.state.items(): - # object_versions = protocol_state["object_versions"] - # if protocol_state["supported"]: - # continue - - # # Check if any object class is deleted, - # # then mark the protocol as not supported. - # is_unsupported = any( - # object_class in object_versions - # for object_class in deleted_object_classes - # ) - # if is_unsupported: - # protocol_state["supported"] = False - # continue - - # for object_class, supported_versions in deleted_versions_map.items(): - # available_versions = object_versions.get(object_class, []) - # unsupported_versions_present = set(available_versions).intersection( - # supported_versions - # ) - # if unsupported_versions_present: - # is_unsupported = True - # break - - # if is_unsupported: - # protocol_state["supported"] = False - - # @property - # def state_defined(self): - # return len(self.state) > 0 - - # @property - # def latest_version(self): - # return int(max(self.state.keys())) + self.file_path.write_text(json.dumps(history, indent=2) + "\n") + + @property + def latest_version(self) -> str: + sorted_versions = natural_key(self.state.keys()) + if len(sorted_versions) > 0: + return sorted_versions[-1] + return "dev" @staticmethod def _hash_to_sha256(obj_dict: Dict) -> str: return hashlib.sha256(json.dumps(obj_dict).encode()).hexdigest() - def build_state(self) -> dict: - return protocol_state_builder(self.protocol_history) - - def diff(self, state: dict) -> dict: - return diff_state(state) - - def upgrade(self) -> Result[SyftSuccess, SyftError]: - state = self.build_state() - print(">>> got state", state) - diff = self.diff(state) - print(">>> got diff", diff) + def build_state(self, stop_key: Optional[str] = None) -> dict: + sorted_dict = sort_dict_naturally(self.protocol_history) + state_dict = defaultdict(dict) + for k, _v in sorted_dict.items(): + # stop early + if stop_key == k: + return state_dict + object_versions = sorted_dict[k]["object_versions"] + for canonical_name, versions in 
object_versions.items(): + for version, object_metadata in versions.items(): + action = object_metadata["action"] + version = object_metadata["version"] + hash_str = object_metadata["hash"] + state_versions = state_dict[canonical_name] + if action == "add" and ( + str(version) in state_versions.keys() + or hash_str in state_versions.values() + ): + raise Exception( + f"Can't add {object_metadata} already in state {versions}" + ) + elif action == "remove" and ( + str(version) not in state_versions.keys() + or hash_str not in state_versions.values() + ): + raise Exception( + f"Can't remove {object_metadata} missing from state {versions}" + ) + if action == "add": + state_dict[canonical_name][str(version)] = hash_str + elif action == "remove": + del state_dict[canonical_name][str(version)] + return state_dict + + def diff_state(self, state: dict) -> tuple[dict, dict]: + compare_dict = defaultdict(dict) # what versions are in the latest code + object_diff = defaultdict(dict) # diff in latest code with saved json + for k in TYPE_BANK: + ( + nonrecursive, + serialize, + deserialize, + attribute_list, + exclude_attrs_list, + serde_overrides, + hash_exclude_attrs, + cls, + attribute_types, + version, + ) = TYPE_BANK[k] + if issubclass(cls, SyftBaseObject): + canonical_name = cls.__canonical_name__ + hash_str = DataProtocol._calculate_object_hash(cls) + + # build this up for later + compare_dict[canonical_name][str(version)] = hash_str + + if canonical_name not in state: + # new object so its an add + object_diff[canonical_name][str(version)] = {} + object_diff[canonical_name][str(version)]["version"] = version + object_diff[canonical_name][str(version)]["hash"] = hash_str + object_diff[canonical_name][str(version)]["action"] = "add" + continue + + versions = state[canonical_name] + if ( + str(version) in versions.keys() + and versions[str(version)] == hash_str + ): + # already there so do nothing + continue + elif str(version) in versions.keys(): + raise Exception( + 
f"{canonical_name} {cls} version {version} hash has changed. " + + f"{hash_str} not in {versions.values()}. " + + "You probably need to bump the version number." + ) + else: + # new object so its an add + object_diff[canonical_name][str(version)] = {} + object_diff[canonical_name][str(version)]["version"] = version + object_diff[canonical_name][str(version)]["hash"] = hash_str + object_diff[canonical_name][str(version)]["action"] = "add" + continue + + # now check for remove actions + for canonical_name in state: + for version, hash_str in state[canonical_name].items(): + if canonical_name not in compare_dict: + # missing so its a remove + object_diff[canonical_name][str(version)] = {} + object_diff[canonical_name][str(version)]["version"] = version + object_diff[canonical_name][str(version)]["hash"] = hash_str + object_diff[canonical_name][str(version)]["action"] = "remove" + continue + versions = compare_dict[canonical_name] + if str(version) not in versions.keys(): + # missing so its a remove + object_diff[canonical_name][str(version)] = {} + object_diff[canonical_name][str(version)]["version"] = version + object_diff[canonical_name][str(version)]["hash"] = hash_str + object_diff[canonical_name][str(version)]["action"] = "remove" + continue + return object_diff, compare_dict + + def stage_protocol_changes(self) -> Result[SyftSuccess, SyftError]: + change_count = 0 current_history = self.protocol_history if "dev" not in current_history: current_history["dev"] = {} current_history["dev"]["object_versions"] = {} object_versions = current_history["dev"]["object_versions"] - for canonical_name, versions in diff.items(): + for canonical_name, versions in self.diff.items(): for version, version_metadata in versions.items(): if canonical_name not in object_versions: object_versions[canonical_name] = {} + change_count += 1 object_versions[canonical_name][version] = version_metadata current_history["dev"]["object_versions"] = object_versions - 
self.save_history(current_history) - # def bump_version(self) -> Result[SyftSuccess, SyftError]: - # state = self.build_state() - # print(">>> got state", state) - # diff = self.diff(state) - # print(">>> got diff", diff) - # current_history = self.protocol_history - # if "dev" not in current_history: - # current_history["dev"] = {} - # current_history["dev"]["object_versions"] = {} - # object_versions = current_history["dev"]["object_versions"] - # for canonical_name, versions in diff.items(): - # for version, version_metadata in versions.items(): - # if canonical_name not in object_versions: - # object_versions[canonical_name] = {} - # object_versions[canonical_name][version] = version_metadata - - # current_history["dev"]["object_versions"] = object_versions - # self.save_history(current_history) - - # def upgrade(self): - # object_to_version_map = self.calc_latest_object_versions() - # new_protocol_hash = self._hash_to_sha256(object_to_version_map) - - # if not self.state_defined: - # new_protocol_version = 1 - # else: - # # Find the current version - # current_protocol_version = self.latest_version - - # new_protocol_version = int(current_protocol_version) + 1 - - # current_protocol_state = self.state[str(current_protocol_version)] - # if current_protocol_state["hash"] == new_protocol_hash: - # print("No change in schema. 
Skipping upgrade.") - # return - - # self.recompute_supported_states( - # current_protocol_version=current_protocol_version, - # new_object_to_version_map=object_to_version_map, - # ) - - # self.state[new_protocol_version] = { - # "object_versions": object_to_version_map, - # "hash": new_protocol_hash, - # "supported": True, - # } - # self.save_state() - # return SyftSuccess(message="Protocol Updated") - - # def validate_current_state(self) -> bool: - # current_object_version_map = self.state[self.latest_version]["object_versions"] - # inconsistent_versions = [] - - # migration_registry = SyftMigrationRegistry.__migration_version_registry__ - # for canonical_name in migration_registry: - # available_versions = migration_registry[canonical_name] - # curr_version_hash_map = current_object_version_map.get(canonical_name, {}) - # for object_version, fqn in available_versions.items(): - # object_klass = index_syft_by_module_name(fqn) - # object_hash = self._calculate_object_hash(object_klass) - # if curr_version_hash_map.get(str(object_version), None) != object_hash: - # inconsistent_versions.append((canonical_name, object_version)) - - # if len(inconsistent_versions) > 0: - # raise InConsistentVersionException( - # f"Version update is required for the following objects.\n {inconsistent_versions}" - # ) - - # return True - - # @property - # def supported_protocols(self) -> List[int]: - # """Returns a list of protocol numbers that are marked as supported.""" - # return [ - # int(protocol_version) - # for protocol_version, protocol_state in self.state.items() - # if str_to_bool(protocol_state["supported"]) - # ] - - # def get_object_versions(self, protocol: Union[int, str]) -> List: - # return self.state[str(protocol)]["object_versions"] + # trim empty dev + if len(current_history["dev"]["object_versions"]) == 0: + del current_history["dev"] + + self.save_history(current_history) + self.load_state() + return SyftSuccess(message=f"{change_count} Protocol Updates Staged to 
dev")
+
+    def bump_protocol_version(self) -> Result[SyftSuccess, SyftError]:
+        if len(self.diff):
+            raise Exception(
+                "You can't bump the protocol version with unstaged changes."
+            )
+
+        keys = self.protocol_history.keys()
+        if "dev" not in keys:
+            raise Exception(
+                "You can't bump the protocol if there are no staged changes."
+            )
+
+        highest_protocol = 0
+        for k in self.protocol_history.keys():
+            if k == "dev":
+                continue
+            highest_protocol = max(highest_protocol, int(k))
+
+        next_highest_protocol = highest_protocol + 1
+        self.protocol_history[str(next_highest_protocol)] = self.protocol_history["dev"]
+        del self.protocol_history["dev"]
+        self.save_history(self.protocol_history)
+        self.load_state()
+        return SyftSuccess(message=f"Protocol Updated to {next_highest_protocol}")
+
+    @property
+    def supported_protocols(self) -> list[Union[int, str]]:
+        """Returns a list of protocol numbers that are marked as supported."""
+        supported = []
+        for version, is_supported in self.protocol_support.items():
+            if is_supported:
+                if version != "dev":
+                    version = int(version)
+                supported.append(version)
+        return supported
+
+    def calculate_supported_protocols(self) -> dict:
+        protocol_supported = {}
+        # go through each historical protocol version
+        for v, version_data in self.protocol_history.items():
+            # we assume it's supported until we prove otherwise
+            protocol_supported[v] = True
+            # iterate through each object
+            for canonical_name, versions in version_data["object_versions"].items():
+                if canonical_name not in self.state:
+                    protocol_supported[v] = False
+                    break
+                # does the current source code state support this object
+                protocol_history_highest = int(max(versions))
+                state_highest = int(max(self.state[canonical_name]))
+                if protocol_history_highest != state_highest:
+                    protocol_supported[v] = False
+                    break
+        return protocol_supported
+
+    def get_object_versions(self, protocol: Union[int, str]) -> list:
+        return self.protocol_history[str(protocol)]["object_versions"]
 
 
 def 
get_data_protocol(): return DataProtocol(filename=data_protocol_file_name()) -def upgrade_protocol(): +def stage_protocol_changes() -> Result[SyftSuccess, SyftError]: + data_protocol = get_data_protocol() + return data_protocol.stage_protocol_changes() + + +def bump_protocol_version() -> Result[SyftSuccess, SyftError]: data_protocol = get_data_protocol() - data_protocol.upgrade() + return data_protocol.bump_protocol_version() def migrate_args_and_kwargs( diff --git a/packages/syft/src/syft/protocol/protocol_version.json b/packages/syft/src/syft/protocol/protocol_version.json index a69ad8c0467..0967ef424bc 100644 --- a/packages/syft/src/syft/protocol/protocol_version.json +++ b/packages/syft/src/syft/protocol/protocol_version.json @@ -1,774 +1 @@ -{ - "dev": { - "object_versions": { - "PartialSyftObject": { - "1": { - "version": 1, - "hash": "fa2770d76f3dd904e2c1c4a78c9ba10e5ac33b4dd7a4d61faa45a22078e94aa8", - "action": "add" - } - }, - "NodeMetadataUpdate": { - "1": { - "version": 1, - "hash": "b73adf379a179f6ef9a6d5ee9477e69697a47ad25579bab46cc1fc44ec35ba04", - "action": "add" - } - }, - "NodeMetadata": { - "1": { - "version": 1, - "hash": "015a04cbfa19adbad7f606e0c419a1dc615a27eff7b76c426853ea3ca4eda860", - "action": "add" - }, - "2": { - "version": 2, - "hash": "3986c7d8b3cf9355a1fbdd99dfe3d872fc464420b91a668ea3288ee4481bab6d", - "action": "add" - } - }, - "LinkedObject": { - "1": { - "version": 1, - "hash": "8a117f8bf0282b8cf525de893404dbd88cc460a5a823d83006c2e82d88846f8d", - "action": "add" - } - }, - "BaseConfig": { - "1": { - "version": 1, - "hash": "f35db90c1c3bdd993b7b1e4f28327a8f6b1d0e43115b044167ca28d5c740178a", - "action": "add" - } - }, - "ServiceConfig": { - "1": { - "version": 1, - "hash": "be54130ee8e6502d91e07b14e96ff0dd779afc36617808067aae3a1a2055b6ea", - "action": "add" - } - }, - "LibConfig": { - "1": { - "version": 1, - "hash": "1a18afad23100c2979e9e2c4e6f046fd65dc2dead405feb57aea4c852b98df01", - "action": "add" - } - }, - "APIEndpoint": { - 
"1": { - "version": 1, - "hash": "c88e4405839e87fdfe90f86877ef2addd7be7281d36b7891636129fc8b3c1e8c", - "action": "add" - } - }, - "LibEndpoint": { - "1": { - "version": 1, - "hash": "214b81bda5fd00e7734227a4c75981d807f3741039be5b4fb007076cf2e638cc", - "action": "add" - } - }, - "SignedSyftAPICall": { - "1": { - "version": 1, - "hash": "2be9b74663354b4edeef3bc75dc67dc35bf24890c8a86a53a97957d470af06b2", - "action": "add" - } - }, - "SyftAPICall": { - "1": { - "version": 1, - "hash": "fa9520d29d9df56fb9d5d2080aecfc3be14c49e7267a3e5b9fd05ad1b0828e11", - "action": "add" - } - }, - "SyftAPIData": { - "1": { - "version": 1, - "hash": "16933cd10a2c45ad44826480e88e06ffbb7a7762c058c7e84da94ddc70478e7c", - "action": "add" - } - }, - "SyftAPI": { - "1": { - "version": 1, - "hash": "142a9bb5a4a63d5d072fbfbdbb48ec9535f1ae51e40f9d4f59760fb807c9a189", - "action": "add" - } - }, - "User": { - "1": { - "version": 1, - "hash": "21cb3659dc4ddd0b4d58c677f46feecac9f682ea36660f42845067b29b5ad8e7", - "action": "add" - } - }, - "UserUpdate": { - "1": { - "version": 1, - "hash": "f12c19dd38330f98fb2d9e0bf47e8bdcad3f6e1c085d2994d80224cf4b905984", - "action": "add" - } - }, - "UserCreate": { - "1": { - "version": 1, - "hash": "a9d7a52aaa7dcf622e317e899e1ded3023a94b86773ca16cd7d6a334fcffbe8b", - "action": "add" - } - }, - "UserSearch": { - "1": { - "version": 1, - "hash": "e697bf5b287cf29560c94c5851d8fb6ac74d2ce5c6200539a11a257bc150c75b", - "action": "add" - } - }, - "UserView": { - "1": { - "version": 1, - "hash": "fd624963af09c0e3471dfc49b2f09fafdd7521c8af804198015cc5455d7b56bc", - "action": "add" - } - }, - "UserViewPage": { - "1": { - "version": 1, - "hash": "c856f13ddc9a405b6d52482a6a273ffb038f988d589e7bb5cd68e0c8dd8668de", - "action": "add" - } - }, - "UserPrivateKey": { - "1": { - "version": 1, - "hash": "e06f237cdfd516caff0766b5f8ba4e4b2b85824c76910f1374ffce72173f8269", - "action": "add" - } - }, - "StoreConfig": { - "1": { - "version": 1, - "hash": 
"5cba60386142434e1d51e8dcd2fe9e0b58c6de994ccede76a527b0c893a23a97", - "action": "add" - } - }, - "NodeSettingsUpdate": { - "1": { - "version": 1, - "hash": "ae77fb9f24004635a29abd979f5efa5a75780efe4fec1773cc533ac04aa57482", - "action": "add" - } - }, - "NodeSettings": { - "1": { - "version": 1, - "hash": "2735f660f23bfda8ffdf97b8ba3ab1bcdba01461b245832e9a9cb2a661ebcb74", - "action": "add" - } - }, - "HTTPConnection": { - "1": { - "version": 1, - "hash": "0a6e181e67978ce45a2414be4c4c39272ca6ed38a5fe9e9877619c13dc6aafef", - "action": "add" - } - }, - "PythonConnection": { - "1": { - "version": 1, - "hash": "6ab9e80d2208ce44fb6e5db8427234680f56b1ef8092be701d6d833b6e213926", - "action": "add" - } - }, - "DateTime": { - "1": { - "version": 1, - "hash": "34f9942a3f75988a1de6e46c40697698f2505336cf74282f683cfd3a7d6d4ec1", - "action": "add" - } - }, - "BlobFile": { - "1": { - "version": 1, - "hash": "229121eb07430f72c66281764a690270ff821a6551c036528b9e749b343bedc1", - "action": "add" - } - }, - "SecureFilePathLocation": { - "1": { - "version": 1, - "hash": "d3e6b95de5da0861922c302e9dabf443ee337b21da695d69c85bdb1e6f0ec45b", - "action": "add" - } - }, - "SeaweedSecureFilePathLocation": { - "1": { - "version": 1, - "hash": "0d5c382191c63e68b90237bb4e882abea6311ff1ba645adc784ee272de5f4623", - "action": "add" - } - }, - "BlobStorageEntry": { - "1": { - "version": 1, - "hash": "e010b50076f73fb934029a583310d13c3ec7abaa93520090fae3fb16457868fc", - "action": "add" - } - }, - "BlobStorageMetadata": { - "1": { - "version": 1, - "hash": "f1d0b4085276ba5f15a8cd81553374d465317d96e0a1c427e33e2d866e362d22", - "action": "add" - } - }, - "CreateBlobStorageEntry": { - "1": { - "version": 1, - "hash": "183fd3ed16b0687f01979b3b76bbb17a9b029bd39f11d46faf54b3e5205e9e2d", - "action": "add" - } - }, - "BlobRetrieval": { - "1": { - "version": 1, - "hash": "c55f486ea79e9e96c047b32987e555357248cd30f18e1a8244fefe73457e5b9e", - "action": "add" - } - }, - "SyftObjectRetrieval": { - "1": { - "version": 1, 
- "hash": "da96484c8e57fc060c4dba29ef4e375284720dd05f1ed2ee60e1df52450437cd", - "action": "add" - } - }, - "BlobRetrievalByURL": { - "1": { - "version": 1, - "hash": "656a44e91ce560056679b459c9fd33b55a5c2b0754e455099a456074e2e14822", - "action": "add" - } - }, - "BlobDeposit": { - "1": { - "version": 1, - "hash": "23a73cc9bff8e6833681e55d872121b6d54520d76f9426fd199a19eb847deea4", - "action": "add" - } - }, - "WorkerSettings": { - "1": { - "version": 1, - "hash": "2fe75dd39cb6367bd9cea2c7f59e40a85bbbcfc44f518572f377ef25c3acd205", - "action": "add" - } - }, - "HTTPNodeRoute": { - "1": { - "version": 1, - "hash": "b4662c11f7487ab907caf3cadf8c33eca2e0fbd640ed1fba872c3f982b749986", - "action": "add" - } - }, - "PythonNodeRoute": { - "1": { - "version": 1, - "hash": "d8f268996e5443a248cc2eb5f4a568591e5f9e18952f8519f3f995e1f1f486e4", - "action": "add" - } - }, - "EnclaveMetadata": { - "1": { - "version": 1, - "hash": "6ad19306231ebbb4d8b0c4e4cc82b881298835862a2c48f2358db47215a020ac", - "action": "add" - } - }, - "DataSubject": { - "1": { - "version": 1, - "hash": "287ab306e1c4ebe0c883600ffd52dc734d08f0217b2a961afbdc6c7883bf4ccd", - "action": "add" - } - }, - "DataSubjectCreate": { - "1": { - "version": 1, - "hash": "8b3487af42ba249d7cf705c7f66a09dd832c055814091def34a38133f8148158", - "action": "add" - } - }, - "DataSubjectMemberRelationship": { - "1": { - "version": 1, - "hash": "6aed0e1548d6b09bfac132040f7315f49c13855a6bc147a4f1aa4ce09572b952", - "action": "add" - } - }, - "Contributor": { - "1": { - "version": 1, - "hash": "3e27f1ea23cecfc3e0956743ae832f0f70ecd144f9df3f128b123e9347944afa", - "action": "add" - } - }, - "MarkdownDescription": { - "1": { - "version": 1, - "hash": "506d47fa85728ad444f2fa657e39b341bc759d21a80325831b1e84926ee748f1", - "action": "add" - } - }, - "Asset": { - "1": { - "version": 1, - "hash": "f8370e8dd87df6a05bde1226c52c3ce6e7de636e6032341b977efe5e601a731d", - "action": "add" - } - }, - "CreateAsset": { - "1": { - "version": 1, - "hash": 
"c3a82856694adcb3c916a961776c2fa3bc90a7ccb50b8b9d42776810266ed241", - "action": "add" - } - }, - "Dataset": { - "1": { - "version": 1, - "hash": "d7a45bf9128472715e63a67192d4181672eadd8b5873d2ba96c2df3b2be749b9", - "action": "add" - } - }, - "DatasetPageView": { - "1": { - "version": 1, - "hash": "c7da1fac28f70c19d199f123b04fbd4a9c7681e3846dee0de70ea926a9440a2e", - "action": "add" - } - }, - "CreateDataset": { - "1": { - "version": 1, - "hash": "6a31617de99738bc176f19397884581051443c7c7ba952c983929fed688a9d7e", - "action": "add" - } - }, - "ActionDataEmpty": { - "1": { - "version": 1, - "hash": "fc83d91ac6ba78c749a4e7e16c1aa117baaae62b5f33156ded4e5a79b9532a98", - "action": "add" - } - }, - "ActionFileData": { - "1": { - "version": 1, - "hash": "47a0a5f9fb48de09885d4f9a6c5a5a05f4dd04575ea42df7dea0cab02817063f", - "action": "add" - } - }, - "Action": { - "1": { - "version": 1, - "hash": "204b3c97b41f208ecb17c541af55e171675faaefa10c103b405a4077b0226a7e", - "action": "add" - } - }, - "AnyActionObject": { - "1": { - "version": 1, - "hash": "f11bd6135ba46247c06bfa8a3c6b7f2e540a1033afe0512c3359e31eb3d59388", - "action": "add" - } - }, - "TwinObject": { - "1": { - "version": 1, - "hash": "8f6abd756d41f9639865c6fd55e6cc2ec6b89fd18bac3c77bf3a0502d81ca8ec", - "action": "add" - } - }, - "ExactMatch": { - "1": { - "version": 1, - "hash": "f71a495d2452190596fe435eaf59b07ba390d7895f6c808a2f87a1719227ba9c", - "action": "add" - } - }, - "OutputHistory": { - "1": { - "version": 1, - "hash": "24f282dd181ecc7a05c93c02dff84dff45aa52928e2331a15ef667645e9bcf0b", - "action": "add" - } - }, - "OutputPolicyExecuteCount": { - "1": { - "version": 1, - "hash": "95d198d2021dbaf9965df39a4571ad06e787684ff79bd6e8a720c47825eebd7e", - "action": "add" - } - }, - "OutputPolicyExecuteOnce": { - "1": { - "version": 1, - "hash": "b8bc1fea2e9b51b5dfc3cbd4b0a131cb2a5b1fe746b5e329395bf319b38bf9b2", - "action": "add" - } - }, - "UserPolicy": { - "1": { - "version": 1, - "hash": 
"ec3f8ea84e2b05ce56db8f35cff14f78569e921f566477581b3299eb6a9fa053", - "action": "add" - } - }, - "SubmitUserPolicy": { - "1": { - "version": 1, - "hash": "3147a4e45270367a40ca8c4b7b502c8683200d123828720365521b90b2775794", - "action": "add" - } - }, - "UserCode": { - "1": { - "version": 1, - "hash": "ad509fccb7bb5c00971453c1f1235785f40d5d5b3eee3df1dc9edafc758c7193", - "action": "add" - } - }, - "SubmitUserCode": { - "1": { - "version": 1, - "hash": "b4a919054beb2488c7b4839d60c494400d791112adf009631dce13787cd58e78", - "action": "add" - } - }, - "UserCodeExecutionResult": { - "1": { - "version": 1, - "hash": "9aab24def6616ac908ef1100b654b4dbeca1ea4cfff229c53d6b14491c795555", - "action": "add" - } - }, - "CodeHistory": { - "1": { - "version": 1, - "hash": "bbbd59801717a98691034a37c9de388c7a65db2721bd0c5c9ff0dbe8fc70be67", - "action": "add" - } - }, - "CodeHistoryView": { - "1": { - "version": 1, - "hash": "142e78415da10dae739e923d39ce511496a3c7b31e8c4553a6cbb1907c126a3a", - "action": "add" - } - }, - "CodeHistoriesDict": { - "1": { - "version": 1, - "hash": "453af101a1de8e37a0bfacf22077c35994718791f295956f1f06727f8d9b7fe8", - "action": "add" - } - }, - "UsersCodeHistoriesDict": { - "1": { - "version": 1, - "hash": "cf8ef92a08cabb068e4616c1700451b75ba4d511343830e3e56b65882fb784aa", - "action": "add" - } - }, - "NodePeer": { - "1": { - "version": 1, - "hash": "8920d9e456fd1a13f46c0528a8fe3fec8af46440b3eb89e8d7d48ad64babee1e", - "action": "add" - } - }, - "CommandReport": { - "1": { - "version": 1, - "hash": "a81fe3d0cc5796f45e925d09b6e8132b79fe5df0f341d55b3870c109f8c1e19d", - "action": "add" - } - }, - "CommandResult": { - "1": { - "version": 1, - "hash": "14b48d4a1cbc5f5ae1e5e74834e7f1002adae7b2766000ea822f180fd7cd39db", - "action": "add" - } - }, - "VPNClientConnection": { - "1": { - "version": 1, - "hash": "7d44711978f930d42c06d04483abcdb1f230782a8f16f4feb5efb7b2b2093bb2", - "action": "add" - } - }, - "HeadscaleAuthToken": { - "1": { - "version": 1, - "hash": 
"0b363503b6c611b44e33561a2a6db3f260cfd4bbc5f4245deac5052fd5149803", - "action": "add" - } - }, - "TailscalePeer": { - "1": { - "version": 1, - "hash": "8ff85aa2b913a6bb206b9de0495d9f74a17f55823891da98cb6fdbe78f46a44b", - "action": "add" - } - }, - "TailscaleStatus": { - "1": { - "version": 1, - "hash": "ed262f4b9a569d9933f4a86cd2caa2ce213fc7a2319a1371f6a3cf3ccf884c8a", - "action": "add" - } - }, - "OnDiskBlobDeposit": { - "1": { - "version": 1, - "hash": "da3abda453def0d7c70c8a5dfcc3c8d00dd6822f60ddc01be3bdead4b0b5b482", - "action": "add" - } - }, - "SeaweedFSBlobDeposit": { - "1": { - "version": 1, - "hash": "bcbec5dcdc06a0c87f89a10a6a8809706f24cedd97b5f850f8b48840a1f41941", - "action": "add" - } - }, - "DictStoreConfig": { - "1": { - "version": 1, - "hash": "b925934f43b72ddf87c420667e16b7d8351f4294997a4a01a43c5958e5e7b465", - "action": "add" - } - }, - "NumpyArrayObject": { - "1": { - "version": 1, - "hash": "d47a376401d92d47e5748e34f98ee270f8ebfd52cffbe6271b5faa8193e728c5", - "action": "add" - } - }, - "NumpyScalarObject": { - "1": { - "version": 1, - "hash": "952bebb4dd3e3641c33b4ebcf2c051dbdebae5f1bf3b7b63ea89423360705411", - "action": "add" - } - }, - "NumpyBoolObject": { - "1": { - "version": 1, - "hash": "b7a231baaa4b1f519d70c5afb15b4a9b7232f1128f7fd3709c1ea8b7345f8c6c", - "action": "add" - } - }, - "PandasDataframeObject": { - "1": { - "version": 1, - "hash": "ff9d6c1884413f712d95d29190e30938b33de19e11dff9f88d9b89c51499cac5", - "action": "add" - } - }, - "PandasSeriesObject": { - "1": { - "version": 1, - "hash": "69eadfe14e5a7035767d2538e2db8775da6569cf5127f58d13315c4b85e5603d", - "action": "add" - } - }, - "ReplyNotification": { - "1": { - "version": 1, - "hash": "ce1e2a6b0d618478d3b1b992e4c8605817919c88a4884ca0540e0886ecdb8215", - "action": "add" - } - }, - "Notification": { - "1": { - "version": 1, - "hash": "1e5a65d91e27bf53d5b2ed0b45d9cee0cf77104b7111f99223194ceb0d0137fe", - "action": "add" - } - }, - "CreateNotification": { - "1": { - "version": 1, - 
"hash": "6858b743ac07d853a0302dc64de0e7d852135e2564ebad325d5ff35d17c29f6f", - "action": "add" - } - }, - "Change": { - "1": { - "version": 1, - "hash": "2c470ff8aa076b88105640ce79d361a9b439927e501c238fa33ac7c1c45aa2c0", - "action": "add" - } - }, - "ChangeStatus": { - "1": { - "version": 1, - "hash": "7571229e92d8d52a0e90fa8856169b41045b42b50568b266823bdcea838dfb39", - "action": "add" - } - }, - "ActionStoreChange": { - "1": { - "version": 1, - "hash": "cf527995930cce09d90806713d30301493240079319bcc85e894428aee46017e", - "action": "add" - } - }, - "Request": { - "1": { - "version": 1, - "hash": "340f4ac61ccbf6f566666327d6bca043dcd643e6f8e24897ef10bd6312e74995", - "action": "add" - } - }, - "RequestInfo": { - "1": { - "version": 1, - "hash": "d571708de3c187ca5840c64784d99f7bfce8f33aa2ba48f9d56b824564551654", - "action": "add" - } - }, - "RequestInfoFilter": { - "1": { - "version": 1, - "hash": "c336af8d474071eb61e5f467513753e64d4e153e12892f9c6875b235057b0f0a", - "action": "add" - } - }, - "SubmitRequest": { - "1": { - "version": 1, - "hash": "1870ce541169eab04cb69d3ae88ea30dc2fcdd997b620567ca9d87936d9600cf", - "action": "add" - } - }, - "ObjectMutation": { - "1": { - "version": 1, - "hash": "275d9cf180904d1e34e1f3d7e838105e843127faf5a64029a1cf85d00234b8c9", - "action": "add" - } - }, - "EnumMutation": { - "1": { - "version": 1, - "hash": "3a1d1b47e0cdb5094298bce58bc9b76ecb66064459504f910c7d755eb1d5e276", - "action": "add" - } - }, - "UserCodeStatusChange": { - "1": { - "version": 1, - "hash": "928dd4ceeb4858b18b806ca62b49a840f54269f7866744c9aa6edb0af9d7dfc1", - "action": "add" - } - }, - "SyftObjectMigrationState": { - "1": { - "version": 1, - "hash": "194fd4dc57764d454ac763d256e3bfcd2b0040a134daf9ee0d8e5ac7ab21abbc", - "action": "add" - } - }, - "ProjectThreadMessage": { - "1": { - "version": 1, - "hash": "9f8b11d603caae6d0e0f28957949dfc57c26fec9685f2c80501330b1d6bae665", - "action": "add" - } - }, - "ProjectMessage": { - "1": { - "version": 1, - "hash": 
"d678beafc33f7e7df7e771a82d5cba6d5a36728a033d3976b6e5998726733d27", - "action": "add" - } - }, - "ProjectRequestResponse": { - "1": { - "version": 1, - "hash": "51b5a5d8cf0bde45abd2bd3a4411f93769fa542666a137ce9611d38fb48ffb4c", - "action": "add" - } - }, - "ProjectRequest": { - "1": { - "version": 1, - "hash": "9eff1b3cc74c9706722363abb4062fc77c0a4f093d448b795ad662861649f111", - "action": "add" - } - }, - "AnswerProjectPoll": { - "1": { - "version": 1, - "hash": "f538a4fcae286cbc9755f51e2f2ce8809d66ce5d66f50173ef1824f89ce9b51d", - "action": "add" - } - }, - "ProjectPoll": { - "1": { - "version": 1, - "hash": "b456a699a249fd3fffe9739cdd9ec3ee8c05e59b2d9872ad9864167d78088091", - "action": "add" - } - }, - "Project": { - "1": { - "version": 1, - "hash": "bf59890e92d95b362cc7ef9c3d7fa6a1815978e02111a30cbcb047239e57d61e", - "action": "add" - } - }, - "ProjectSubmit": { - "1": { - "version": 1, - "hash": "5084844056ddefcea7fc634dd9945c03ef6d030bcd8f63aa07fe11fea0a5389f", - "action": "add" - } - }, - "QueueItem": { - "1": { - "version": 1, - "hash": "1d53446d5cd788120b15ea6b108a4a7abd480377370be7128f44297f8fb00b76", - "action": "add" - } - }, - "ZMQClientConfig": { - "1": { - "version": 1, - "hash": "e3153f18c9fd04cf07b844153d093c8a090baac4c99d71ecd6491961e7f1dafb", - "action": "add" - } - }, - "SQLiteStoreConfig": { - "1": { - "version": 1, - "hash": "f4497d4a972814e3c27d610487e7ab578d8353dd1635f11aab4d5cbe31f721a8", - "action": "add" - } - }, - "Plan": { - "1": { - "version": 1, - "hash": "41713fc89a2cab7db592df6cd1c45e1309f86a50a8f531ddaf4052947186b0e0", - "action": "add" - } - } - } - } -} +{} diff --git a/packages/syft/src/syft/service/metadata/migrations.py b/packages/syft/src/syft/service/metadata/migrations.py index 58d09021eb2..dd6200b97a2 100644 --- a/packages/syft/src/syft/service/metadata/migrations.py +++ b/packages/syft/src/syft/service/metadata/migrations.py @@ -2,10 +2,10 @@ from ...types.syft_migration import migrate from ...types.transforms import rename 
from .node_metadata import NodeMetadata -from .node_metadata import NodeMetadataV2 +from .node_metadata import NodeMetadataV1 -@migrate(NodeMetadata, NodeMetadataV2) +@migrate(NodeMetadataV1, NodeMetadata) def upgrade_metadata_v1_to_v2(): return [ rename("highest_object_version", "highest_version"), @@ -13,7 +13,7 @@ def upgrade_metadata_v1_to_v2(): ] -@migrate(NodeMetadataV2, NodeMetadata) +@migrate(NodeMetadata, NodeMetadataV1) def downgrade_metadata_v2_to_v1(): return [ rename("highest_version", "highest_object_version"), diff --git a/packages/syft/src/syft/service/metadata/node_metadata.py b/packages/syft/src/syft/service/metadata/node_metadata.py index 91dfa30131a..ee6fa62a49f 100644 --- a/packages/syft/src/syft/service/metadata/node_metadata.py +++ b/packages/syft/src/syft/service/metadata/node_metadata.py @@ -3,19 +3,16 @@ # stdlib from typing import Callable -from typing import Dict from typing import List from typing import Optional # third party from packaging import version from pydantic import BaseModel -from pydantic import root_validator # relative from ...abstract_node import NodeType from ...node.credentials import SyftVerifyKey -from ...protocol.data_protocol import get_data_protocol from ...serde.serializable import serializable from ...types.syft_object import SYFT_OBJECT_VERSION_1 from ...types.syft_object import SYFT_OBJECT_VERSION_2 @@ -64,7 +61,7 @@ class NodeMetadataUpdate(SyftObject): @serializable() -class NodeMetadata(SyftObject): +class NodeMetadataV1(SyftObject): __canonical_name__ = "NodeMetadata" __version__ = SYFT_OBJECT_VERSION_1 @@ -93,7 +90,7 @@ def check_version(self, client_version: str) -> bool: @serializable() -class NodeMetadataV2(SyftObject): +class NodeMetadata(SyftObject): __canonical_name__ = "NodeMetadata" __version__ = SYFT_OBJECT_VERSION_2 @@ -139,24 +136,26 @@ class NodeMetadataJSON(BaseModel, StorableObjectType): admin_email: str node_side_type: str show_warnings: bool - supported_protocols: List + 
supported_protocols: List = [] - @root_validator(pre=True) - def add_protocol_versions(cls, values: Dict) -> Dict: - if "supported_protocols" not in values: - data_protocol = get_data_protocol() - values["supported_protocols"] = data_protocol.supported_protocols - return values + # breaks Object of type UID is not JSON serializable + # @root_validator(pre=True) + # def add_protocol_versions(cls, values: Dict) -> Dict: + # if "supported_protocols" not in values: + # data_protocol = get_data_protocol() + # values["supported_protocols"] = data_protocol.supported_protocols + # return values def check_version(self, client_version: str) -> bool: - return check_version( - client_version=client_version, - server_version=self.syft_version, - server_name=self.name, - ) + return True + # return check_version( + # client_version=client_version, + # server_version=self.syft_version, + # server_name=self.name, + # ) -@transform(NodeMetadataV2, NodeMetadataJSON) +@transform(NodeMetadata, NodeMetadataJSON) def metadata_to_json() -> List[Callable]: return [ drop(["__canonical_name__"]), @@ -165,7 +164,7 @@ def metadata_to_json() -> List[Callable]: ] -@transform(NodeMetadataJSON, NodeMetadataV2) +@transform(NodeMetadataJSON, NodeMetadata) def json_to_metadata() -> List[Callable]: return [ drop(["metadata_version", "supported_protocols"]), diff --git a/packages/syft/tests/syft/settings/fixtures.py b/packages/syft/tests/syft/settings/fixtures.py index ab8ee071f59..3d3c3abee8b 100644 --- a/packages/syft/tests/syft/settings/fixtures.py +++ b/packages/syft/tests/syft/settings/fixtures.py @@ -7,6 +7,7 @@ # syft absolute from syft.__init__ import __version__ from syft.abstract_node import NodeSideType +from syft.abstract_node import NodeType from syft.service.metadata.node_metadata import NodeMetadataJSON from syft.service.settings.settings import NodeSettings from syft.service.settings.settings import NodeSettingsUpdate @@ -51,13 +52,14 @@ def metadata_json(faker) -> NodeMetadataJSON: 
name=faker.name(), id=faker.text(), verify_key=faker.text(), - highest_object_version=SYFT_OBJECT_VERSION_1, - lowest_object_version=SYFT_OBJECT_VERSION_1, + highest_version=SYFT_OBJECT_VERSION_1, + lowest_version=SYFT_OBJECT_VERSION_1, syft_version=__version__, signup_enabled=False, admin_email="info@openmined.org", node_side_type=NodeSideType.LOW_SIDE.value, show_warnings=False, + node_type=NodeType.DOMAIN.value, ) diff --git a/packages/syft/tests/syft/settings/settings_service_test.py b/packages/syft/tests/syft/settings/settings_service_test.py index 967c00ececa..ba1aac7b0f7 100644 --- a/packages/syft/tests/syft/settings/settings_service_test.py +++ b/packages/syft/tests/syft/settings/settings_service_test.py @@ -14,7 +14,7 @@ from syft.node.credentials import SyftSigningKey from syft.node.credentials import SyftVerifyKey from syft.service.context import AuthedServiceContext -from syft.service.metadata.node_metadata import NodeMetadata +from syft.service.metadata.node_metadata import NodeMetadataV1 from syft.service.response import SyftError from syft.service.response import SyftSuccess from syft.service.settings.settings import NodeSettings @@ -227,7 +227,7 @@ def test_settings_allow_guest_registration( # Create a new worker verify_key = SyftSigningKey.generate().verify_key - mock_node_metadata = NodeMetadata( + mock_node_metadata = NodeMetadataV1( name=faker.name(), verify_key=verify_key, highest_object_version=1, @@ -309,7 +309,7 @@ def get_mock_client(faker, root_client, role): return guest_client verify_key = SyftSigningKey.generate().verify_key - mock_node_metadata = NodeMetadata( + mock_node_metadata = NodeMetadataV1( name=faker.name(), verify_key=verify_key, highest_object_version=1, From 69fda5c496db03cecde9d2074d316a1f6eeb8b38 Mon Sep 17 00:00:00 2001 From: Madhava Jay Date: Mon, 2 Oct 2023 16:19:30 +1000 Subject: [PATCH 36/67] Fixed missing __canonical_name__ fields --- notebooks/Experimental/Data Migration.ipynb | 2 +- 
notebooks/Experimental/Untitled.ipynb | 1085 +++++++++++++++++ packages/syft/src/syft/client/api.py | 13 +- packages/syft/src/syft/client/client.py | 6 +- .../syft/src/syft/protocol/data_protocol.py | 19 +- .../src/syft/protocol/protocol_version.json | 775 +++++++++++- .../src/syft/service/action/action_graph.py | 4 + .../syft/service/metadata/node_metadata.py | 26 +- .../src/syft/store/mongo_document_store.py | 1 + .../tests/syft/stores/store_mocks_test.py | 4 +- 10 files changed, 1907 insertions(+), 28 deletions(-) create mode 100644 notebooks/Experimental/Untitled.ipynb diff --git a/notebooks/Experimental/Data Migration.ipynb b/notebooks/Experimental/Data Migration.ipynb index 03251ecf02c..42c8a9a551d 100644 --- a/notebooks/Experimental/Data Migration.ipynb +++ b/notebooks/Experimental/Data Migration.ipynb @@ -978,7 +978,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.9.7" + "version": "3.11.1" }, "toc": { "base_numbering": 1, diff --git a/notebooks/Experimental/Untitled.ipynb b/notebooks/Experimental/Untitled.ipynb new file mode 100644 index 00000000000..9de8340e952 --- /dev/null +++ b/notebooks/Experimental/Untitled.ipynb @@ -0,0 +1,1085 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 1, + "id": "853b5397-fd94-4bce-9ae5-ae16d9c201df", + "metadata": {}, + "outputs": [], + "source": [ + "import syft as sy\n", + "from syft.types.datetime import DateTime\n", + "from syft.service.metadata.node_metadata import NodeMetadata, NodeMetadataV1\n", + "from syft.service.metadata.node_metadata import NodeMetadataJSON" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "id": "d82eecfc-9d40-4fbb-a868-a1d175a745db", + "metadata": {}, + "outputs": [], + "source": [ + "from syft.protocol.data_protocol import DataProtocol" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "id": "0fa4a070-8485-4f44-8d96-f3d28ede2fb3", + "metadata": {}, + "outputs": [], + "source": [ + "from 
syft.service.service import BaseConfig" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "id": "a5632f9f-a175-4e96-b7ef-7e825ccfc50c", + "metadata": {}, + "outputs": [], + "source": [ + "from syft.store.dict_document_store import DictStoreConfig" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "id": "0b71f13e-00d9-4e7a-8b9f-b9d7f0aff1f3", + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
SyftSuccess: 110 Protocol Updates Staged to dev

" + ], + "text/plain": [ + "SyftSuccess: 110 Protocol Updates Staged to dev" + ] + }, + "execution_count": 5, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "sy.stage_protocol_changes()" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "id": "14959468-9727-47d1-a89c-e0902cd9f4be", + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
SyftSuccess: 0 Protocol Updates Staged to dev

" + ], + "text/plain": [ + "SyftSuccess: 0 Protocol Updates Staged to dev" + ] + }, + "execution_count": 6, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "sy.stage_protocol_changes()" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "id": "019568c9-8a44-4cf7-a879-cae67e808170", + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
SyftSuccess: Protocol Updated to 1

" + ], + "text/plain": [ + "SyftSuccess: Protocol Updated to 1" + ] + }, + "execution_count": 7, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "sy.bump_protocol_version()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "7e892015-d737-4f8a-babe-d61c5ffd9f9e", + "metadata": {}, + "outputs": [], + "source": [ + "x = sy.get_data_protocol()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "06da56ae-db2a-4723-8d70-9ad8b8edc064", + "metadata": {}, + "outputs": [], + "source": [ + "x.supported_protocols" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "7e064841-d1dc-4048-8477-47dc0a75eb3e", + "metadata": {}, + "outputs": [], + "source": [ + "x.state" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "56dfe0f3-74e0-40e5-af26-42a7a12971b7", + "metadata": {}, + "outputs": [], + "source": [ + "x.state[\"PartialSyftObject\"]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "8c8d38a1-4f50-4607-9752-c202dd3e231b", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "d795af5b-ebf6-4454-8379-194976611a3d", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "58791646-a7d6-4925-b6d3-f804b5df8e51", + "metadata": {}, + "outputs": [], + "source": [ + "import re" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "8af3d1cb-fb45-4ae1-aa69-f4ebdf9ffa3d", + "metadata": {}, + "outputs": [], + "source": [ + "def natural_key(key: int | str) -> list[int]:\n", + " \"\"\"Define key for natural ordering of strings.\"\"\"\n", + " if isinstance(key, int):\n", + " key = str(key)\n", + " return [int(s) if s.isdigit() else s for s in re.split(\"(\\d+)\", key)]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "d61312af-7a8e-4290-8a4d-3477b5990f29", + "metadata": {}, + "outputs": [], + 
"source": [ + "x = [\"dev\"]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "a0d2ac8e-4640-4d19-97e0-947832078921", + "metadata": {}, + "outputs": [], + "source": [ + "sorted(x, key=natural_key)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "e391bf8a-9870-4d36-872c-96969878a515", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "5c7a5f3f-09b4-43d7-abfb-81fa3f779eed", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "e4b6b77b-b115-4e3c-a4a0-592c035edc1f", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "faf2e3ff-dd52-4105-a8f6-518594dcd7d0", + "metadata": {}, + "outputs": [], + "source": [ + "raise Exception" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "1481115f-2f91-4809-87cb-1bbea37d57cc", + "metadata": {}, + "outputs": [], + "source": [ + "import re\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "b51c2dc1-8c7c-4884-b7ab-abb190816e0e", + "metadata": {}, + "outputs": [], + "source": [ + "def natural_key(key: int | str) -> list[int]:\n", + " \"\"\"Define key for natural ordering of strings.\"\"\"\n", + " if isinstance(key, int):\n", + " key = str(key)\n", + " return [int(s) if s.isdigit() else s for s in re.split(\"(\\d+)\", key)]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "af95e3b8-daf9-4b3a-8d61-d185698b37da", + "metadata": {}, + "outputs": [], + "source": [ + "a = [\"1\", \"dev\", \"2\", \"11\"]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "ed9e1877-6569-47e3-acc5-e85f3e2b19a4", + "metadata": {}, + "outputs": [], + "source": [ + "b = sorted(a, key=natural_key)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "bb05d920-19f1-48ee-9f7e-f5de51ade4ce", + "metadata": {}, + "outputs": [], 
+ "source": [ + "b" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "71ed4308-8045-42f0-9900-fa2cafb6f227", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "46a024d7-90a3-4d95-ba5e-1de1e93bc4ad", + "metadata": {}, + "outputs": [], + "source": [ + "DataProtocol._calculate_object_hash(BaseConfig)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "d1ac72f4-42ff-4975-b70f-fe98d79195b1", + "metadata": {}, + "outputs": [], + "source": [ + "from syft.serde.recursive import TYPE_BANK" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "e78c45a3-b59a-43c5-9719-9bccb0ca75dc", + "metadata": {}, + "outputs": [], + "source": [ + "g = {\"1\":\"a\", \"22\":\"b\", \"11\":\"c\"}" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "7fbdf02a-f7be-4b92-8ec8-074accbf7c4f", + "metadata": {}, + "outputs": [], + "source": [ + "max(g)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "b07b2cb0-d075-45c4-ab48-5716c77da5b8", + "metadata": {}, + "outputs": [], + "source": [ + "range(2, 23)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "2abfbcb1-af70-4227-b978-002e1979ca17", + "metadata": {}, + "outputs": [], + "source": [ + "(\n", + "nonrecursive,\n", + "serialize,\n", + "deserialize,\n", + "attribute_list,\n", + "exclude_attrs_list,\n", + "serde_overrides,\n", + "hash_exclude_attrs,\n", + "cls,\n", + "attribute_types,\n", + "version,\n", + ") = TYPE_BANK[\"syft.service.service.BaseConfig\"]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "4a8084d8-e241-4372-9686-5cb5b0286cac", + "metadata": {}, + "outputs": [], + "source": [ + "cls" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "6b07f2ea-adac-42c5-9ac7-a18f64fc8c05", + "metadata": {}, + "outputs": [], + "source": [ + "cls == BaseConfig" + ] + }, + { + "cell_type": "code", + "execution_count": 
null, + "id": "0edc9c91-73cc-4c21-bbb7-20670e6ef292", + "metadata": {}, + "outputs": [], + "source": [ + "DataProtocol._calculate_object_hash(cls)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "0710f746-74f8-4b9d-b25b-9662517816f3", + "metadata": {}, + "outputs": [], + "source": [ + "\n", + "# Exception: BaseConfig version 1 hash has changed.\n", + "\n", + "# 332964663526a4d6ddc64bc9d77aa9324000bb721520a5465229235523fe8c7d not in \n", + "\n", + "# dict_values([{'version': 1, 'hash': 'f35db90c1c3bdd993b7b1e4f28327a8f6b1d0e43115b044167ca28d5c740178a', 'action': 'add'}]). You probably need to bump the version number.\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "cd615d46-f6f7-4888-bf1d-1e264b380c11", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "7389ca79-d161-4e87-9a27-f24a6b3a2a40", + "metadata": {}, + "outputs": [], + "source": [ + "raise Exception" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "73f5e981-30a8-43f2-b762-69954dcc7724", + "metadata": {}, + "outputs": [], + "source": [ + "x = {'PartialSyftObject': {1: 'fa2770d76f3dd904e2c1c4a78c9ba10e5ac33b4dd7a4d61faa45a22078e94aa8'}, 'NodeMetadataUpdate': {1: 'b73adf379a179f6ef9a6d5ee9477e69697a47ad25579bab46cc1fc44ec35ba04'}, 'NodeMetadata': {2: '3986c7d8b3cf9355a1fbdd99dfe3d872fc464420b91a668ea3288ee4481bab6d'}, 'LinkedObject': {1: '8a117f8bf0282b8cf525de893404dbd88cc460a5a823d83006c2e82d88846f8d'}, 'BaseConfig': {1: '32b71d31ecba9a29c82fc4ca0e6568ac5c97204501ffdfef079c93ffbaf6b278'}, 'APIEndpoint': {1: 'c88e4405839e87fdfe90f86877ef2addd7be7281d36b7891636129fc8b3c1e8c'}, 'LibEndpoint': {1: '214b81bda5fd00e7734227a4c75981d807f3741039be5b4fb007076cf2e638cc'}, 'SignedSyftAPICall': {1: '2be9b74663354b4edeef3bc75dc67dc35bf24890c8a86a53a97957d470af06b2'}, 'SyftAPICall': {1: 'fa9520d29d9df56fb9d5d2080aecfc3be14c49e7267a3e5b9fd05ad1b0828e11'}, 'SyftAPIData': {1: 
'16933cd10a2c45ad44826480e88e06ffbb7a7762c058c7e84da94ddc70478e7c'}, 'SyftAPI': {1: '142a9bb5a4a63d5d072fbfbdbb48ec9535f1ae51e40f9d4f59760fb807c9a189'}, 'User': {1: '21cb3659dc4ddd0b4d58c677f46feecac9f682ea36660f42845067b29b5ad8e7'}, 'UserUpdate': {1: 'f12c19dd38330f98fb2d9e0bf47e8bdcad3f6e1c085d2994d80224cf4b905984'}, 'UserCreate': {1: 'a9d7a52aaa7dcf622e317e899e1ded3023a94b86773ca16cd7d6a334fcffbe8b'}, 'UserSearch': {1: 'e697bf5b287cf29560c94c5851d8fb6ac74d2ce5c6200539a11a257bc150c75b'}, 'UserView': {1: 'fd624963af09c0e3471dfc49b2f09fafdd7521c8af804198015cc5455d7b56bc'}, 'UserViewPage': {1: 'c856f13ddc9a405b6d52482a6a273ffb038f988d589e7bb5cd68e0c8dd8668de'}, 'UserPrivateKey': {1: 'e06f237cdfd516caff0766b5f8ba4e4b2b85824c76910f1374ffce72173f8269'}, 'StoreConfig': {1: '91790b136dba4ddbb5bd8dfcd337a4a8e448cc26b338bf15bf066ba97be56924'}, 'NodeSettingsUpdate': {1: 'ae77fb9f24004635a29abd979f5efa5a75780efe4fec1773cc533ac04aa57482'}, 'NodeSettings': {1: '2735f660f23bfda8ffdf97b8ba3ab1bcdba01461b245832e9a9cb2a661ebcb74'}, 'HTTPConnection': {1: '0a6e181e67978ce45a2414be4c4c39272ca6ed38a5fe9e9877619c13dc6aafef'}, 'PythonConnection': {1: '6ab9e80d2208ce44fb6e5db8427234680f56b1ef8092be701d6d833b6e213926'}, 'DateTime': {1: '34f9942a3f75988a1de6e46c40697698f2505336cf74282f683cfd3a7d6d4ec1'}, 'BlobFile': {1: '229121eb07430f72c66281764a690270ff821a6551c036528b9e749b343bedc1'}, 'SecureFilePathLocation': {1: 'd3e6b95de5da0861922c302e9dabf443ee337b21da695d69c85bdb1e6f0ec45b'}, 'SeaweedSecureFilePathLocation': {1: '0d5c382191c63e68b90237bb4e882abea6311ff1ba645adc784ee272de5f4623'}, 'BlobStorageEntry': {1: 'e010b50076f73fb934029a583310d13c3ec7abaa93520090fae3fb16457868fc'}, 'BlobStorageMetadata': {1: 'f1d0b4085276ba5f15a8cd81553374d465317d96e0a1c427e33e2d866e362d22'}, 'CreateBlobStorageEntry': {1: '183fd3ed16b0687f01979b3b76bbb17a9b029bd39f11d46faf54b3e5205e9e2d'}, 'BlobRetrieval': {1: 'c55f486ea79e9e96c047b32987e555357248cd30f18e1a8244fefe73457e5b9e'}, 'SyftObjectRetrieval': {1: 
'da96484c8e57fc060c4dba29ef4e375284720dd05f1ed2ee60e1df52450437cd'}, 'BlobRetrievalByURL': {1: '656a44e91ce560056679b459c9fd33b55a5c2b0754e455099a456074e2e14822'}, 'BlobDeposit': {1: '23a73cc9bff8e6833681e55d872121b6d54520d76f9426fd199a19eb847deea4'}, 'WorkerSettings': {1: '2fe75dd39cb6367bd9cea2c7f59e40a85bbbcfc44f518572f377ef25c3acd205'}, 'HTTPNodeRoute': {1: 'b4662c11f7487ab907caf3cadf8c33eca2e0fbd640ed1fba872c3f982b749986'}, 'PythonNodeRoute': {1: 'd8f268996e5443a248cc2eb5f4a568591e5f9e18952f8519f3f995e1f1f486e4'}, 'EnclaveMetadata': {1: '6ad19306231ebbb4d8b0c4e4cc82b881298835862a2c48f2358db47215a020ac'}, 'DataSubject': {1: '287ab306e1c4ebe0c883600ffd52dc734d08f0217b2a961afbdc6c7883bf4ccd'}, 'DataSubjectCreate': {1: '8b3487af42ba249d7cf705c7f66a09dd832c055814091def34a38133f8148158'}, 'DataSubjectMemberRelationship': {1: '6aed0e1548d6b09bfac132040f7315f49c13855a6bc147a4f1aa4ce09572b952'}, 'Contributor': {1: '3e27f1ea23cecfc3e0956743ae832f0f70ecd144f9df3f128b123e9347944afa'}, 'MarkdownDescription': {1: '506d47fa85728ad444f2fa657e39b341bc759d21a80325831b1e84926ee748f1'}, 'Asset': {1: 'f8370e8dd87df6a05bde1226c52c3ce6e7de636e6032341b977efe5e601a731d'}, 'CreateAsset': {1: 'c3a82856694adcb3c916a961776c2fa3bc90a7ccb50b8b9d42776810266ed241'}, 'Dataset': {1: 'd7a45bf9128472715e63a67192d4181672eadd8b5873d2ba96c2df3b2be749b9'}, 'DatasetPageView': {1: 'c7da1fac28f70c19d199f123b04fbd4a9c7681e3846dee0de70ea926a9440a2e'}, 'CreateDataset': {1: '6a31617de99738bc176f19397884581051443c7c7ba952c983929fed688a9d7e'}, 'ActionDataEmpty': {1: 'fc83d91ac6ba78c749a4e7e16c1aa117baaae62b5f33156ded4e5a79b9532a98'}, 'ActionFileData': {1: '47a0a5f9fb48de09885d4f9a6c5a5a05f4dd04575ea42df7dea0cab02817063f'}, 'Action': {1: '204b3c97b41f208ecb17c541af55e171675faaefa10c103b405a4077b0226a7e'}, 'AnyActionObject': {1: 'f11bd6135ba46247c06bfa8a3c6b7f2e540a1033afe0512c3359e31eb3d59388'}, 'TwinObject': {1: '8f6abd756d41f9639865c6fd55e6cc2ec6b89fd18bac3c77bf3a0502d81ca8ec'}, 'ExactMatch': {1: 
'f71a495d2452190596fe435eaf59b07ba390d7895f6c808a2f87a1719227ba9c'}, 'OutputHistory': {1: '24f282dd181ecc7a05c93c02dff84dff45aa52928e2331a15ef667645e9bcf0b'}, 'OutputPolicyExecuteCount': {1: '95d198d2021dbaf9965df39a4571ad06e787684ff79bd6e8a720c47825eebd7e'}, 'OutputPolicyExecuteOnce': {1: 'b8bc1fea2e9b51b5dfc3cbd4b0a131cb2a5b1fe746b5e329395bf319b38bf9b2'}, 'UserPolicy': {1: 'ec3f8ea84e2b05ce56db8f35cff14f78569e921f566477581b3299eb6a9fa053'}, 'SubmitUserPolicy': {1: '3147a4e45270367a40ca8c4b7b502c8683200d123828720365521b90b2775794'}, 'UserCode': {1: 'ad509fccb7bb5c00971453c1f1235785f40d5d5b3eee3df1dc9edafc758c7193'}, 'SubmitUserCode': {1: 'b4a919054beb2488c7b4839d60c494400d791112adf009631dce13787cd58e78'}, 'UserCodeExecutionResult': {1: '9aab24def6616ac908ef1100b654b4dbeca1ea4cfff229c53d6b14491c795555'}, 'CodeHistory': {1: 'bbbd59801717a98691034a37c9de388c7a65db2721bd0c5c9ff0dbe8fc70be67'}, 'CodeHistoryView': {1: '142e78415da10dae739e923d39ce511496a3c7b31e8c4553a6cbb1907c126a3a'}, 'CodeHistoriesDict': {1: '453af101a1de8e37a0bfacf22077c35994718791f295956f1f06727f8d9b7fe8'}, 'UsersCodeHistoriesDict': {1: 'cf8ef92a08cabb068e4616c1700451b75ba4d511343830e3e56b65882fb784aa'}, 'NodePeer': {1: '8920d9e456fd1a13f46c0528a8fe3fec8af46440b3eb89e8d7d48ad64babee1e'}, 'CommandReport': {1: 'a81fe3d0cc5796f45e925d09b6e8132b79fe5df0f341d55b3870c109f8c1e19d'}, 'CommandResult': {1: '14b48d4a1cbc5f5ae1e5e74834e7f1002adae7b2766000ea822f180fd7cd39db'}, 'VPNClientConnection': {1: '7d44711978f930d42c06d04483abcdb1f230782a8f16f4feb5efb7b2b2093bb2'}, 'HeadscaleAuthToken': {1: '0b363503b6c611b44e33561a2a6db3f260cfd4bbc5f4245deac5052fd5149803'}, 'TailscalePeer': {1: '8ff85aa2b913a6bb206b9de0495d9f74a17f55823891da98cb6fdbe78f46a44b'}, 'TailscaleStatus': {1: 'ed262f4b9a569d9933f4a86cd2caa2ce213fc7a2319a1371f6a3cf3ccf884c8a'}, 'OnDiskBlobDeposit': {1: 'da3abda453def0d7c70c8a5dfcc3c8d00dd6822f60ddc01be3bdead4b0b5b482'}, 'SeaweedFSBlobDeposit': {1: 
'bcbec5dcdc06a0c87f89a10a6a8809706f24cedd97b5f850f8b48840a1f41941'}, 'NumpyArrayObject': {1: 'd47a376401d92d47e5748e34f98ee270f8ebfd52cffbe6271b5faa8193e728c5'}, 'NumpyScalarObject': {1: '952bebb4dd3e3641c33b4ebcf2c051dbdebae5f1bf3b7b63ea89423360705411'}, 'NumpyBoolObject': {1: 'b7a231baaa4b1f519d70c5afb15b4a9b7232f1128f7fd3709c1ea8b7345f8c6c'}, 'PandasDataframeObject': {1: 'ff9d6c1884413f712d95d29190e30938b33de19e11dff9f88d9b89c51499cac5'}, 'PandasSeriesObject': {1: '69eadfe14e5a7035767d2538e2db8775da6569cf5127f58d13315c4b85e5603d'}, 'ReplyNotification': {1: 'ce1e2a6b0d618478d3b1b992e4c8605817919c88a4884ca0540e0886ecdb8215'}, 'Notification': {1: '1e5a65d91e27bf53d5b2ed0b45d9cee0cf77104b7111f99223194ceb0d0137fe'}, 'CreateNotification': {1: '6858b743ac07d853a0302dc64de0e7d852135e2564ebad325d5ff35d17c29f6f'}, 'Change': {1: '2c470ff8aa076b88105640ce79d361a9b439927e501c238fa33ac7c1c45aa2c0'}, 'ChangeStatus': {1: '7571229e92d8d52a0e90fa8856169b41045b42b50568b266823bdcea838dfb39'}, 'ActionStoreChange': {1: 'cf527995930cce09d90806713d30301493240079319bcc85e894428aee46017e'}, 'Request': {1: '340f4ac61ccbf6f566666327d6bca043dcd643e6f8e24897ef10bd6312e74995'}, 'RequestInfo': {1: 'd571708de3c187ca5840c64784d99f7bfce8f33aa2ba48f9d56b824564551654'}, 'RequestInfoFilter': {1: 'c336af8d474071eb61e5f467513753e64d4e153e12892f9c6875b235057b0f0a'}, 'SubmitRequest': {1: '1870ce541169eab04cb69d3ae88ea30dc2fcdd997b620567ca9d87936d9600cf'}, 'ObjectMutation': {1: '275d9cf180904d1e34e1f3d7e838105e843127faf5a64029a1cf85d00234b8c9'}, 'EnumMutation': {1: '3a1d1b47e0cdb5094298bce58bc9b76ecb66064459504f910c7d755eb1d5e276'}, 'UserCodeStatusChange': {1: '928dd4ceeb4858b18b806ca62b49a840f54269f7866744c9aa6edb0af9d7dfc1'}, 'SyftObjectMigrationState': {1: '194fd4dc57764d454ac763d256e3bfcd2b0040a134daf9ee0d8e5ac7ab21abbc'}, 'ProjectThreadMessage': {1: '9f8b11d603caae6d0e0f28957949dfc57c26fec9685f2c80501330b1d6bae665'}, 'ProjectMessage': {1: 
'd678beafc33f7e7df7e771a82d5cba6d5a36728a033d3976b6e5998726733d27'}, 'ProjectRequestResponse': {1: '51b5a5d8cf0bde45abd2bd3a4411f93769fa542666a137ce9611d38fb48ffb4c'}, 'ProjectRequest': {1: '9eff1b3cc74c9706722363abb4062fc77c0a4f093d448b795ad662861649f111'}, 'AnswerProjectPoll': {1: 'f538a4fcae286cbc9755f51e2f2ce8809d66ce5d66f50173ef1824f89ce9b51d'}, 'ProjectPoll': {1: 'b456a699a249fd3fffe9739cdd9ec3ee8c05e59b2d9872ad9864167d78088091'}, 'Project': {1: 'bf59890e92d95b362cc7ef9c3d7fa6a1815978e02111a30cbcb047239e57d61e'}, 'ProjectSubmit': {1: '5084844056ddefcea7fc634dd9945c03ef6d030bcd8f63aa07fe11fea0a5389f'}, 'QueueItem': {1: '1d53446d5cd788120b15ea6b108a4a7abd480377370be7128f44297f8fb00b76'}, 'ZMQClientConfig': {1: 'e3153f18c9fd04cf07b844153d093c8a090baac4c99d71ecd6491961e7f1dafb'}, 'Plan': {1: '41713fc89a2cab7db592df6cd1c45e1309f86a50a8f531ddaf4052947186b0e0'}}" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "5db2bb72-f188-42fe-9f1c-e79150dddb0b", + "metadata": {}, + "outputs": [], + "source": [ + "import json" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "f0b4e62d-548b-4ba5-97a0-ae81417c7346", + "metadata": {}, + "outputs": [], + "source": [ + "json.dumps(x)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "054fe585-2a32-4f18-af71-1a944a7e94d6", + "metadata": {}, + "outputs": [], + "source": [ + "\n", + "# Exception: BaseConfig version 1 hash has changed.\n", + "# 332964663526a4d6ddc64bc9d77aa9324000bb721520a5465229235523fe8c7d not in \n", + "# dict_values([{'version': 1, 'hash': 'f35db90c1c3bdd993b7b1e4f28327a8f6b1d0e43115b044167ca28d5c740178a', 'action': 'add'}]). 
You probably need to bump the version number.\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "4b89e304-d8f1-42ab-86c0-3552500e6853", + "metadata": {}, + "outputs": [], + "source": [ + "# k syft.service.service.ServiceConfig BaseConfig 1 332964663526a4d6ddc64bc9d77aa9324000bb721520a5465229235523fe8c7d\n", + "# got versions for canonical name BaseConfig {1: {'version': 1, 'hash': 'f35db90c1c3bdd993b7b1e4f28327a8f6b1d0e43115b044167ca28d5c740178a', 'action': 'add'}}\n", + "# is versoin in versions keys 1 dict_keys([1]) True\n", + "# is hash in hash values 332964663526a4d6ddc64bc9d77aa9324000bb721520a5465229235523fe8c7d dict_values([{'version': 1, 'hash': 'f35db90c1c3bdd993b7b1e4f28327a8f6b1d0e43115b044167ca28d5c740178a', 'action': 'add'}]) False\n", + "# else 1 dict_keys([1]) True" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "b4697ff3-4781-489b-a506-759bbc9aa68a", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "de5a7d5a-6746-4d58-a5dd-f3ed1ea9a875", + "metadata": {}, + "outputs": [], + "source": [ + "import json" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "6f1f75c5-caa3-43e4-a28b-64864d3aa832", + "metadata": {}, + "outputs": [], + "source": [ + "import re\n", + "\n", + "def natural_key(key: int | str) -> list[int]:\n", + " \"\"\"Define key for natural ordering of strings.\"\"\"\n", + " if isinstance(key, int):\n", + " key = str(key)\n", + " return [int(s) if s.isdigit() else s for s in re.split('(\\d+)', key)]\n", + "\n", + "def sort_dict_naturally(d: dict) -> dict:\n", + " \"\"\"Sort dictionary by keys in natural order.\"\"\"\n", + " return {k: d[k] for k in sorted(d.keys(), key=natural_key)}\n", + "\n", + "\n", + "# Example usage:\n", + "d = {\n", + " \"apple10\": \"value10\",\n", + " \"apple2\": \"value2\",\n", + " \"banana\": \"value_banana\",\n", + " \"apple1\": \"value1\",\n", + "}\n", + "\n", + "sorted_d = 
sort_dict_naturally(d)\n", + "print(sorted_d)\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "d8b3d6da-4a0f-4539-92bf-33130648c9ff", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "fe79ee3a-2a0c-4e60-bc44-c1e7d1d81d3d", + "metadata": {}, + "outputs": [], + "source": [ + "from syft.types.syft_object import SyftMigrationRegistry, SyftObjectRegistry\n", + "from syft.service.metadata.migrations import *" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "c0907163-d6ab-4a86-8ba9-b6f13c6a8390", + "metadata": {}, + "outputs": [], + "source": [ + "from typing import Optional" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "588beaf9-7aaf-4691-9de5-9165a92aa550", + "metadata": {}, + "outputs": [], + "source": [ + "from collections import defaultdict\n", + "def protocol_state_builder(protocol_dict: dict, stop_key: Optional[str] = None):\n", + " sorted_dict = sort_dict_naturally(protocol_dict)\n", + " state_dict = defaultdict(dict)\n", + " for k, v in sorted_dict.items():\n", + " # stop early\n", + " if stop_key == k:\n", + " return state_dict\n", + " print(\"k\", k, v)\n", + " object_versions = sorted_dict[k][\"object_versions\"]\n", + " print(\"got object versions\", object_versions)\n", + " for canonical_name, object_metadata in object_versions.items():\n", + " action = object_metadata[\"action\"]\n", + " version = object_metadata[\"version\"]\n", + " hash_str = object_metadata[\"hash\"]\n", + " print(\"canonical\", canonical_name, object_metadata)\n", + " versions = state_dict[canonical_name]\n", + " if action == \"add\" and (version in versions.keys() or hash_str in versions.values()):\n", + " raise Exception(f\"Can't add {object_metadata} already in state {versions}\")\n", + " elif action == \"remove\" and (version not in versions.keys() or hash_str not in versions.values()):\n", + " raise Exception(f\"Can't remove 
{object_metadata} missing from state {versions}\")\n", + " if action == \"add\":\n", + " versions[version] = hash_str\n", + " elif action == \"remove\":\n", + " del versions[version]\n", + " state_dict[canonical_name] = versions\n", + " return state_dict" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "aa30fea0-1eda-4806-b74f-39ae88c68031", + "metadata": {}, + "outputs": [], + "source": [ + "a = {}\n", + "a[\"1\"] = {}\n", + "a[\"1\"][\"object_versions\"] = {}\n", + "a[\"1\"][\"object_versions\"][\"Thing\"] = {}\n", + "a[\"1\"][\"object_versions\"][\"Thing\"][\"version\"] = 1\n", + "a[\"1\"][\"object_versions\"][\"Thing\"][\"hash\"] = \"abc\"\n", + "a[\"1\"][\"object_versions\"][\"Thing\"][\"action\"] = \"add\"\n", + "\n", + "a[\"2\"] = {}\n", + "a[\"2\"][\"object_versions\"] = {}\n", + "a[\"2\"][\"object_versions\"][\"Thing\"] = {}\n", + "a[\"2\"][\"object_versions\"][\"Thing\"][\"version\"] = 2\n", + "a[\"2\"][\"object_versions\"][\"Thing\"][\"hash\"] = \"def\"\n", + "a[\"2\"][\"object_versions\"][\"Thing\"][\"action\"] = \"add\"\n", + "\n", + "a[\"3\"] = {}\n", + "a[\"3\"][\"object_versions\"] = {}\n", + "a[\"3\"][\"object_versions\"][\"Thing\"] = {}\n", + "a[\"3\"][\"object_versions\"][\"Thing\"][\"version\"] = 1\n", + "a[\"3\"][\"object_versions\"][\"Thing\"][\"hash\"] = \"abc\"\n", + "a[\"3\"][\"object_versions\"][\"Thing\"][\"action\"] = \"remove\"" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "b19fd8f9-cbb6-4ddb-9626-3ee1ddb2b874", + "metadata": {}, + "outputs": [], + "source": [ + "a" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "77afd622-1ab3-4720-9f7e-c290db2b315a", + "metadata": {}, + "outputs": [], + "source": [ + "b = protocol_state_builder(a)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "df702f68-a5b9-4aef-be53-60fff941fe59", + "metadata": {}, + "outputs": [], + "source": [ + "b" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": 
"ac5ac02e-22d3-4655-aebb-e1d5ef182b06", + "metadata": {}, + "outputs": [], + "source": [ + "sy.serde.recursive.TYPE_BANK[\"numpy.ndarray\"]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "be941a27-fa20-4854-9b6b-99c55f1bc817", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "a4085166-e092-4d53-a795-610a2b9d1a27", + "metadata": {}, + "outputs": [], + "source": [ + "# (\n", + "# nonrecursive,\n", + "# serialize,\n", + "# deserialize,\n", + "# attribute_list,\n", + "# exclude_attrs_list,\n", + "# serde_overrides,\n", + "# hash_exclude_attrs,\n", + "# cls,\n", + "# attribute_types,\n", + "# version,\n", + "# ) = sy.serde.recursive.TYPE_BANK[\"syft.service.metadata.node_metadata.NodeMetadataV2\"]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "47ceef1c-0a6f-4b20-aca1-017c0fcc0b55", + "metadata": {}, + "outputs": [], + "source": [ + "# sy.serde.recursive.TYPE_BANK.keys()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "a0237c1a-536f-4780-ae40-18161d413c59", + "metadata": {}, + "outputs": [], + "source": [ + "def _calculate_object_hash(klass) -> str:\n", + " # TODO: this depends on what is marked as serde\n", + " field_data = {\n", + " field_name: repr(model_field.annotation)\n", + " for field_name, model_field in klass.__fields__.items()\n", + " }\n", + " obj_meta_info = {\n", + " \"canonical_name\": klass.__canonical_name__,\n", + " \"version\": klass.__version__,\n", + " \"unique_keys\": getattr(klass, \"__attr_unique__\", []),\n", + " \"field_data\": field_data,\n", + " }\n", + "\n", + " return hashlib.sha256(json.dumps(obj_meta_info).encode()).hexdigest()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "8a1e67d5-d004-4929-8c88-fabe3735482d", + "metadata": {}, + "outputs": [], + "source": [ + "from syft.types.syft_object import SyftBaseObject\n", + "import hashlib" + ] + }, + { + "cell_type": "code", + 
"execution_count": null, + "id": "fe52e359-f632-4f2b-a84c-2e60d2362ed3", + "metadata": {}, + "outputs": [], + "source": [ + "def diff_state(state: dict) -> dict:\n", + " object_diff = {}\n", + " compare_dict = {}\n", + " type_bank = sy.serde.recursive.TYPE_BANK\n", + " for k in type_bank:\n", + " (\n", + " nonrecursive,\n", + " serialize,\n", + " deserialize,\n", + " attribute_list,\n", + " exclude_attrs_list,\n", + " serde_overrides,\n", + " hash_exclude_attrs,\n", + " cls,\n", + " attribute_types,\n", + " version,\n", + " ) = type_bank[k]\n", + " if issubclass(cls, SyftBaseObject):\n", + " hash_str = _calculate_object_hash(cls)\n", + " canonical_name = cls.__canonical_name__\n", + " print(\"k\", k, canonical_name, cls, version, hash_str)\n", + "\n", + " # build this up for later\n", + " compare_dict[canonical_name] = {}\n", + " compare_dict[canonical_name][version] = hash_str\n", + "\n", + " if canonical_name not in state:\n", + " # new object so its an add\n", + " object_diff[canonical_name] = {}\n", + " object_diff[canonical_name][\"version\"] = version\n", + " object_diff[canonical_name][\"hash\"] = hash_str\n", + " object_diff[canonical_name][\"action\"] = \"add\"\n", + " continue\n", + " \n", + " versions = object_diff[canonical_name]\n", + "\n", + " if version in versions.keys() and hash_str in versions.values():\n", + " # already there so do nothing\n", + " continue\n", + " elif version in versions.keys():\n", + " raise Exception(f\"{canonical_name} {cls} version {version} hash has changed. 
You probably need to bump the version number.\")\n", + " else:\n", + " # new object so its an add\n", + " object_diff[canonical_name] = {}\n", + " object_diff[canonical_name][\"version\"] = version\n", + " object_diff[canonical_name][\"hash\"] = hash_str\n", + " object_diff[canonical_name][\"action\"] = \"add\"\n", + " continue\n", + "\n", + " # now check for remove actions\n", + " for canonical_name in state:\n", + " for version, hash_str in state[canonical_name].items():\n", + " if canonical_name not in compare_dict:\n", + " # missing so its a remove\n", + " object_diff[canonical_name] = {}\n", + " object_diff[canonical_name][\"version\"] = version\n", + " object_diff[canonical_name][\"hash\"] = hash_str\n", + " object_diff[canonical_name][\"action\"] = \"remove\"\n", + " continue\n", + " versions = compare_dict[canonical_name]\n", + " if version in versions.keys():\n", + " # missing so its a remove\n", + " object_diff[canonical_name] = {}\n", + " object_diff[canonical_name][\"version\"] = version\n", + " object_diff[canonical_name][\"hash\"] = hash_str\n", + " object_diff[canonical_name][\"action\"] = \"remove\"\n", + " continue\n", + " return object_diff" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "01838413-4b9c-4490-bc24-52e9e7f00c53", + "metadata": {}, + "outputs": [], + "source": [ + "g = diff_state(b)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "223e946b-65fc-4d1e-a4d5-9046f21f3d36", + "metadata": {}, + "outputs": [], + "source": [ + "g" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "d1635cac-912f-49ee-9683-9b672671c811", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "a2b2e8ba-232f-4fe1-8d18-b3ed41964bb0", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "01e5e780-ab38-46e0-923d-d3153e992d4e", + "metadata": {}, + "outputs": [], + "source": 
[] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "a0ff7367-c239-4079-9802-c98292a51150", + "metadata": {}, + "outputs": [], + "source": [ + "a[\"1\"] = 1" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "6c2fb95e-944a-405e-8282-116ccfeb64d6", + "metadata": {}, + "outputs": [], + "source": [ + "a[\"2\"] = 2" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "12dfbc65-bc40-4ae4-9443-c3efd68359fb", + "metadata": {}, + "outputs": [], + "source": [ + "a[\"11\"] = 1" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "09fcd132-0bd3-4b1c-8fa2-de02de5dad94", + "metadata": {}, + "outputs": [], + "source": [ + "a[\"dev\"] = 4" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "4b28d32f-0fd2-4e8b-a062-5f87352fa5d0", + "metadata": {}, + "outputs": [], + "source": [ + "a" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "df86253c-0230-4d61-b390-2a6693976e63", + "metadata": {}, + "outputs": [], + "source": [ + "y = json.dumps(a)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "2cc10838-07f1-4e8e-b8b0-8247a90cef66", + "metadata": {}, + "outputs": [], + "source": [ + "y" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "ec13f3af-10fb-4b1a-8f2c-c2e2a3cc940c", + "metadata": {}, + "outputs": [], + "source": [ + "d = json.loads(y)\n", + "d" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "9d263c27-a68e-4c6f-a0c6-b117ee044a1e", + "metadata": {}, + "outputs": [], + "source": [ + "d" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "961f4782-1acc-4eb1-ae6e-d3427dbd16b8", + "metadata": {}, + "outputs": [], + "source": [ + "for k,v in d.items():\n", + " print(\"k\", k, v)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "03766fe6-780a-4f9d-ac77-4f2720986b2f", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + 
"execution_count": null, + "id": "b814b62e-8ebf-4bb3-a6d9-55cfed1506f7", + "metadata": {}, + "outputs": [], + "source": [ + "a[1] = \"a\"" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "d8237a9e-1556-42bb-9380-8197a90a9e32", + "metadata": {}, + "outputs": [], + "source": [ + "sorted_d = sort_dict_naturally(a)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "047465a4-6148-4d30-9da4-21be15ff7f03", + "metadata": {}, + "outputs": [], + "source": [ + "sorted_d" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "afb57bc4-66f2-49f8-a291-96be4e71dd43", + "metadata": {}, + "outputs": [], + "source": [ + "object_versions = dict()\n", + "object_versions[\"SyftObject\"] = {\"version\": \"1\", \"hash\": \"25a574002025025cfd155e3970305293e21fdd6af9dcde176990802306cc0359\", \"action\":\"add\"}" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "3bdf972b-cee3-4cde-867f-73b1efb20bde", + "metadata": {}, + "outputs": [], + "source": [ + "a[\"1\"] = object_versions" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "b2a7ebe9-98fb-4449-a73f-3b171a2df9d5", + "metadata": {}, + "outputs": [], + "source": [ + "a" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "a466dfd5-54c4-4aa4-852c-473090758725", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.1" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/packages/syft/src/syft/client/api.py b/packages/syft/src/syft/client/api.py index fe57d6e805b..1e49eb47597 100644 --- a/packages/syft/src/syft/client/api.py +++ 
b/packages/syft/src/syft/client/api.py @@ -28,6 +28,7 @@ from ..abstract_node import AbstractNode from ..node.credentials import SyftSigningKey from ..node.credentials import SyftVerifyKey +from ..protocol.data_protocol import PROTOCOL_TYPE from ..protocol.data_protocol import get_data_protocol from ..protocol.data_protocol import migrate_args_and_kwargs from ..serde.deserialize import _deserialize @@ -214,7 +215,7 @@ def generate_remote_function( path: str, make_call: Callable, pre_kwargs: Dict[str, Any], - communication_protocol: int, + communication_protocol: PROTOCOL_TYPE, warning: Optional[APIEndpointWarning], ): if "blocking" in signature.parameters: @@ -274,7 +275,7 @@ def generate_remote_lib_function( path: str, module_path: str, make_call: Callable, - communication_protocol: int, + communication_protocol: PROTOCOL_TYPE, pre_kwargs: Dict[str, Any], ): if "blocking" in signature.parameters: @@ -477,7 +478,7 @@ class SyftAPI(SyftObject): # serde / storage rules refresh_api_callback: Optional[Callable] = None __user_role: ServiceRole = ServiceRole.NONE - communication_protocol: int + communication_protocol: PROTOCOL_TYPE # def __post_init__(self) -> None: # pass @@ -505,9 +506,9 @@ def for_user( # If server uses a higher protocol version than client, then # signatures needs to be downgraded. 
- signature_needs_downgrade = int(node.current_protocol) > int( - communication_protocol - ) + signature_needs_downgrade = node.current_protocol != "dev" and int( + node.current_protocol + ) > int(communication_protocol) data_protocol = get_data_protocol() if signature_needs_downgrade: diff --git a/packages/syft/src/syft/client/client.py b/packages/syft/src/syft/client/client.py index 1ad7ba8937b..59f7966742a 100644 --- a/packages/syft/src/syft/client/client.py +++ b/packages/syft/src/syft/client/client.py @@ -35,6 +35,7 @@ from ..node.credentials import SyftVerifyKey from ..node.credentials import UserLoginCredentials from ..protocol.data_protocol import DataProtocol +from ..protocol.data_protocol import PROTOCOL_TYPE from ..protocol.data_protocol import get_data_protocol from ..serde.deserialize import _deserialize from ..serde.serializable import serializable @@ -482,7 +483,10 @@ def post_init(self) -> None: def _get_communication_protocol(self, protocols_supported_by_server: List) -> int: data_protocol: DataProtocol = get_data_protocol() - protocols_supported_by_client: List[int] = data_protocol.supported_protocols + protocols_supported_by_client: List[ + PROTOCOL_TYPE + ] = data_protocol.supported_protocols + self.current_protocol = data_protocol.latest_version common_protocols = set(protocols_supported_by_client).intersection( protocols_supported_by_server diff --git a/packages/syft/src/syft/protocol/data_protocol.py b/packages/syft/src/syft/protocol/data_protocol.py index 80d28c6959c..e869e8e8478 100644 --- a/packages/syft/src/syft/protocol/data_protocol.py +++ b/packages/syft/src/syft/protocol/data_protocol.py @@ -22,9 +22,10 @@ from ..types.syft_object import SyftBaseObject PROTOCOL_STATE_FILENAME = "protocol_version.json" +PROTOCOL_TYPE = str | int -def natural_key(key: int | str) -> list[int]: +def natural_key(key: PROTOCOL_TYPE) -> list[int]: """Define key for natural ordering of strings.""" if isinstance(key, int): key = str(key) @@ -79,8 +80,8 @@ def 
save_history(self, history: dict) -> None: self.file_path.write_text(json.dumps(history, indent=2) + "\n") @property - def latest_version(self) -> str: - sorted_versions = natural_key(self.state.keys()) + def latest_version(self) -> PROTOCOL_TYPE: + sorted_versions = sorted(self.protocol_history.keys(), key=natural_key) if len(sorted_versions) > 0: return sorted_versions[-1] return "dev" @@ -163,9 +164,11 @@ def diff_state(self, state: dict) -> tuple[dict, dict]: continue elif str(version) in versions.keys(): raise Exception( - f"{canonical_name} {cls} version {version} hash has changed. " + f"{canonical_name} for class {cls.__name__} fqn {cls} " + + f"version {version} hash has changed. " + f"{hash_str} not in {versions.values()}. " - + "You probably need to bump the version number." + + "Is a unique __canonical_name__ for this subclass missing? " + + "If the class has changed you will need to bump the version number." ) else: # new object so its an add @@ -277,6 +280,12 @@ def calculate_supported_protocols(self) -> None: def get_object_versions(self, protocol: Union[int, str]) -> list: return self.protocol_history[str(protocol)]["object_versions"] + @property + def has_dev(self) -> bool: + if "dev" in self.protocol_history.keys(): + return True + return False + def get_data_protocol(): return DataProtocol(filename=data_protocol_file_name()) diff --git a/packages/syft/src/syft/protocol/protocol_version.json b/packages/syft/src/syft/protocol/protocol_version.json index 0967ef424bc..ce4cb4303d3 100644 --- a/packages/syft/src/syft/protocol/protocol_version.json +++ b/packages/syft/src/syft/protocol/protocol_version.json @@ -1 +1,774 @@ -{} +{ + "dev": { + "object_versions": { + "PartialSyftObject": { + "1": { + "version": 1, + "hash": "008917584d8e1c09015cdbef02f59c0622f48e0618877c1b44425c8846befc13", + "action": "add" + } + }, + "NodeMetadataUpdate": { + "1": { + "version": 1, + "hash": "569d124c23590360bda240c19b53314ccc6204c5d1ab0d2898976a028e002191", + "action": 
"add" + } + }, + "NodeMetadata": { + "1": { + "version": 1, + "hash": "6bee018894dfdf697ea624740d0bf051750e0b0d8470ced59646f6d8812068ac", + "action": "add" + }, + "2": { + "version": 2, + "hash": "f856169fea72486cd436875ce4411ef935da11eb7c5af48121adfa00d4c0cdb6", + "action": "add" + } + }, + "LinkedObject": { + "1": { + "version": 1, + "hash": "824567c6933c095d0e2f6995c8de3581c0fbd2e9e4ead35c8159f7964709c28e", + "action": "add" + } + }, + "BaseConfig": { + "1": { + "version": 1, + "hash": "4e5257080ce615aa4122b02bad8487e4c7d6d0f171ff77abbc9e8cd3e33df89a", + "action": "add" + } + }, + "ServiceConfig": { + "1": { + "version": 1, + "hash": "ca91f59bf045d949d82860f7d52655bfbede4cf6bdc5bae8f847f08a16f05d74", + "action": "add" + } + }, + "LibConfig": { + "1": { + "version": 1, + "hash": "c6ff229aea16874c5d9ae4d1f9e500d13f5cf984bbcee7abd16c5841707a2f78", + "action": "add" + } + }, + "APIEndpoint": { + "1": { + "version": 1, + "hash": "c0e83867b107113e6fed06364ba364c24b2f4af35b15a3869b176318d3be7989", + "action": "add" + } + }, + "LibEndpoint": { + "1": { + "version": 1, + "hash": "153eac6d8990774eebfffaa75a9895e7c4e1a0e09465d5da0baf4c3a3b03369d", + "action": "add" + } + }, + "SignedSyftAPICall": { + "1": { + "version": 1, + "hash": "e66a116de2fa44ebdd0d4c2d7d5a047dedb555fd201a0f431cd8017d9d33a61d", + "action": "add" + } + }, + "SyftAPICall": { + "1": { + "version": 1, + "hash": "014bd1d0933f6070888a313edba239170759de24eae49bf2374c1be4dbe2b4d7", + "action": "add" + } + }, + "SyftAPIData": { + "1": { + "version": 1, + "hash": "db101a75227e34750d7056785a1e87bb2e8ad6604f19c372d0cb6aa437243bf5", + "action": "add" + } + }, + "SyftAPI": { + "1": { + "version": 1, + "hash": "bde651f293314585412aede9c86e9703e1f0b6b45b8d9143d831573087e4e078", + "action": "add" + } + }, + "User": { + "1": { + "version": 1, + "hash": "078636e64f737e60245b39cf348d30fb006531e80c12b70aa7cf98254e1bb37a", + "action": "add" + } + }, + "UserUpdate": { + "1": { + "version": 1, + "hash": 
"839dd90aeb611e1dc471c8fd6daf230e913465c0625c6a297079cb7f0a271195", + "action": "add" + } + }, + "UserCreate": { + "1": { + "version": 1, + "hash": "dab78b63544ae91c09f9843c323cb237c0a6fcfeb71c1acf5f738e2fcf5c277f", + "action": "add" + } + }, + "UserSearch": { + "1": { + "version": 1, + "hash": "69d1e10b81c8a4143cf70e4f911d8562732af2458ebbc455ca64542f11373dd1", + "action": "add" + } + }, + "UserView": { + "1": { + "version": 1, + "hash": "63289383fe7e7584652f242a4362ce6e2f0ade52f6416ab6149b326a506b0675", + "action": "add" + } + }, + "UserViewPage": { + "1": { + "version": 1, + "hash": "16dac6209b19a934d286ef1efa874379e0040c324e71023c57d1bc6d2d367171", + "action": "add" + } + }, + "UserPrivateKey": { + "1": { + "version": 1, + "hash": "7cb196587887f0f3bffb298dd9f3b88509e9b2748792bf8dc03bdd0d6b98714a", + "action": "add" + } + }, + "StoreConfig": { + "1": { + "version": 1, + "hash": "17de8875cf590311ddb042140347ffc79d4a85028e504dad178ca4e1237ec861", + "action": "add" + } + }, + "NodeSettingsUpdate": { + "1": { + "version": 1, + "hash": "b6ddc66ff270a3c2c4760e31e1a55d72ed04ccae2d0115ebe2fba6f2bf9bd119", + "action": "add" + } + }, + "NodeSettings": { + "1": { + "version": 1, + "hash": "b662047bb278f4f5db77c102f94b733c3a929839271b3d6b82ea174a60e2aaf0", + "action": "add" + } + }, + "HTTPConnection": { + "1": { + "version": 1, + "hash": "5ee19eaf55ecbe7945ea45924c036ec0f500114a2f64176620961a8c2ec94cdb", + "action": "add" + } + }, + "PythonConnection": { + "1": { + "version": 1, + "hash": "011946fc9af0a6987f5c7bc9b0208b2fae9d65217531430bced7ba542788da1a", + "action": "add" + } + }, + "DateTime": { + "1": { + "version": 1, + "hash": "7e9d89309a10d2110a7ae4f97d8f25a7914853269e8fa0c531630790c1253f17", + "action": "add" + } + }, + "BlobFile": { + "1": { + "version": 1, + "hash": "47ed55183d619c6c624e35412360a41de42833e2c24223c1de1ad12a84fdafc2", + "action": "add" + } + }, + "SecureFilePathLocation": { + "1": { + "version": 1, + "hash": 
"7febc066e2ee5a3a4a891720afede3f5c155cacc0557662ac4d04bf67b964c6d", + "action": "add" + } + }, + "SeaweedSecureFilePathLocation": { + "1": { + "version": 1, + "hash": "5724a38b1a92b8a55da3d9cc34a720365a6d0c32683acda630fc44067173e201", + "action": "add" + } + }, + "BlobStorageEntry": { + "1": { + "version": 1, + "hash": "9f1b027cce390ee6f71c7a81e7420bb71a477b29c6c62ba74e781a97bc5434e6", + "action": "add" + } + }, + "BlobStorageMetadata": { + "1": { + "version": 1, + "hash": "6888943be3f97186190dd26d7eefbdf29b15c6f2fa459e13608065ebcdb799e2", + "action": "add" + } + }, + "CreateBlobStorageEntry": { + "1": { + "version": 1, + "hash": "61a373336e83645f1b6d78a320323d9ea4ee91b3d87b730cb0608fbfa0072262", + "action": "add" + } + }, + "BlobRetrieval": { + "1": { + "version": 1, + "hash": "a8d7e1d6483e7a9b5a130e837fa398862aa6cbb316cc5f4470450d835755fdd9", + "action": "add" + } + }, + "SyftObjectRetrieval": { + "1": { + "version": 1, + "hash": "7ccc62d5b434d2d438b3df661b4d753b0c7c8d593d451d8b86d364da83998c89", + "action": "add" + } + }, + "BlobRetrievalByURL": { + "1": { + "version": 1, + "hash": "18fd860cb9de296532fc9ff075932e6a4377cc8f043dd88ed4f620517321077d", + "action": "add" + } + }, + "BlobDeposit": { + "1": { + "version": 1, + "hash": "c98e6da658a3be01ead4ea6ee6a4c10046879f0ce0f5fc5f946346671579b229", + "action": "add" + } + }, + "WorkerSettings": { + "1": { + "version": 1, + "hash": "0dcd95422ec8a7c74e45ee68a125084c08f898dc94a13d25fe5a5fd0e4fc5027", + "action": "add" + } + }, + "HTTPNodeRoute": { + "1": { + "version": 1, + "hash": "199423cebb9427d22a5c0e4e2210230be6c64d2996aa35a1d6f7677d0ebf945d", + "action": "add" + } + }, + "PythonNodeRoute": { + "1": { + "version": 1, + "hash": "4ad5eccc9e849c81bd7a6f18199cd434a7eab107c07a6ff3a870d561ae99e69e", + "action": "add" + } + }, + "EnclaveMetadata": { + "1": { + "version": 1, + "hash": "39f85e475015e6f860ddcc5fea819423eba2db8f4b7d8e004c05a44d6f8444c6", + "action": "add" + } + }, + "DataSubject": { + "1": { + "version": 1, 
+ "hash": "0b8b049d4627727b444c419f5d6a97b7cb97a433088ebf744c854b6a470dadf1", + "action": "add" + } + }, + "DataSubjectCreate": { + "1": { + "version": 1, + "hash": "5a94f9fcba75c50d78d71222f0235c5fd4d8003ae0db4d74bdbc4d56a99de3aa", + "action": "add" + } + }, + "DataSubjectMemberRelationship": { + "1": { + "version": 1, + "hash": "0a820edc9f1a87387acc3c611fe852752fcb3dab7608058f2bc48211be7bfbd2", + "action": "add" + } + }, + "Contributor": { + "1": { + "version": 1, + "hash": "d1d4f25bb87e59c0414501d3335097de66815c164c9ed5a7850ff8bec69fbcdc", + "action": "add" + } + }, + "MarkdownDescription": { + "1": { + "version": 1, + "hash": "519328a3952049f57004013e4fb00840695b24b8575cad983056412c9c9d9ba6", + "action": "add" + } + }, + "Asset": { + "1": { + "version": 1, + "hash": "655a8b9ca076bc7cb48833954ccc86634d78d24f76c77d3d9de1c677a00415d7", + "action": "add" + } + }, + "CreateAsset": { + "1": { + "version": 1, + "hash": "c94660b20a96ee4347210d2ee9ea5c60a221e55e2921ea5ea28a1ee606020ffb", + "action": "add" + } + }, + "Dataset": { + "1": { + "version": 1, + "hash": "ab8fec7aa0a422a7b55209bbe87991d7085ef70df9dcf1c3538ab9a355f1554d", + "action": "add" + } + }, + "DatasetPageView": { + "1": { + "version": 1, + "hash": "7e7403e5de953b0730569861eb2e0df7a63b4360726c4b6b6939be09ad16df2a", + "action": "add" + } + }, + "CreateDataset": { + "1": { + "version": 1, + "hash": "e47f467815eaf6a40762407da0debaeac5093e36b76e54d630486fe43c1d93d6", + "action": "add" + } + }, + "ActionDataEmpty": { + "1": { + "version": 1, + "hash": "89b5912fe5416f922051b8068be6071a03c87a4ab264959de524f1b86e95f028", + "action": "add" + } + }, + "ActionFileData": { + "1": { + "version": 1, + "hash": "1f32d94b75b0a6b4e86cec93d94aa905738219e3e7e75f51dd335ee832a6ed3e", + "action": "add" + } + }, + "Action": { + "1": { + "version": 1, + "hash": "5cf71ee35097f17fbb1dd05096f875211d71cf07161205d7f6a9c11fd49d5272", + "action": "add" + } + }, + "AnyActionObject": { + "1": { + "version": 1, + "hash": 
"bcb31f847907edc9c95d2d120dc5427854604f40940e3f41cd0474a1820ac65e", + "action": "add" + } + }, + "TwinObject": { + "1": { + "version": 1, + "hash": "c42455586b43724a7421becd99122b787a129798daf6081e96954ecaea228099", + "action": "add" + } + }, + "ExactMatch": { + "1": { + "version": 1, + "hash": "e497e2e2380db72766c5e219e8afd13136d8953933d6f1eaf83b14001e887cde", + "action": "add" + } + }, + "OutputHistory": { + "1": { + "version": 1, + "hash": "4ec6e6efd86a972b474251885151bdfe4ef262562174605e8ab6a8abba1aa867", + "action": "add" + } + }, + "OutputPolicyExecuteCount": { + "1": { + "version": 1, + "hash": "6bb24b3b35e19564c43b838ca3f46ccdeadb6596511917f2d220681a378e439d", + "action": "add" + } + }, + "OutputPolicyExecuteOnce": { + "1": { + "version": 1, + "hash": "32a40fc9966b277528eebc61c01041f3a5447417731954abdaffbb14dabc76bb", + "action": "add" + } + }, + "UserPolicy": { + "1": { + "version": 1, + "hash": "c69b17b1d96cace8b45da6d9639165f2da4aa7ff156b6fd922ac217bf7856d8a", + "action": "add" + } + }, + "SubmitUserPolicy": { + "1": { + "version": 1, + "hash": "96f7f39279fadc70c569b8d48ed4d6420a8132db51e37466d272fda19953554b", + "action": "add" + } + }, + "UserCode": { + "1": { + "version": 1, + "hash": "e14c22686cdc7d1fb2b0d01c0aebdea37e62a61b051677c1d30234214f05cd42", + "action": "add" + } + }, + "SubmitUserCode": { + "1": { + "version": 1, + "hash": "f572d32350d09e25b29572c591029d37a216818618c383094404f84bc9c15dd6", + "action": "add" + } + }, + "UserCodeExecutionResult": { + "1": { + "version": 1, + "hash": "49c32e85e78b7b189a7f13b7e26115ef94fcb0b60b578adcbe2b95e289f63a6e", + "action": "add" + } + }, + "CodeHistory": { + "1": { + "version": 1, + "hash": "a7baae93862ae0aa67675f1617574e31aafb15a9ebff633eb817278a3a867161", + "action": "add" + } + }, + "CodeHistoryView": { + "1": { + "version": 1, + "hash": "0ed1a2a04a962ecbcfa38b0b8a03c1e51e8946a4b80f6bf2557148ce658671ce", + "action": "add" + } + }, + "CodeHistoriesDict": { + "1": { + "version": 1, + "hash": 
"95288411cd5843834f3273a2fd66a7df2e603e980f4ab1d329f9ab17d5d2f643", + "action": "add" + } + }, + "UsersCodeHistoriesDict": { + "1": { + "version": 1, + "hash": "5e1f389c4565ee8558386dd5c934d81e0c68ab1434f86bb9065976b587ef44d1", + "action": "add" + } + }, + "NodePeer": { + "1": { + "version": 1, + "hash": "50c5b7867d450c1af8011830339c07f5e7bd92589daeac976e0ab86151082cbc", + "action": "add" + } + }, + "CommandReport": { + "1": { + "version": 1, + "hash": "81c6f248e89f6191f75afb7170f82b616377ae46c1d809f6047e13c6f2f299d5", + "action": "add" + } + }, + "CommandResult": { + "1": { + "version": 1, + "hash": "65588691901dc0562afb650b0abe04fe6e3e3db516abda3ec82a371ce459ed0d", + "action": "add" + } + }, + "VPNClientConnection": { + "1": { + "version": 1, + "hash": "cf12dcf5066e6c441e6841ae24bd295d8331e7920c79473bfabc17c5e47cf79e", + "action": "add" + } + }, + "HeadscaleAuthToken": { + "1": { + "version": 1, + "hash": "d88dbe672feb126eb1c5f36208edb1effcfe8721fb6619a9ac62ed3fff8f1546", + "action": "add" + } + }, + "TailscalePeer": { + "1": { + "version": 1, + "hash": "603da3a1087e8d80a6b7e4cd6ccaccd7e1baf3ec77265d810e3a9e9cd233ac04", + "action": "add" + } + }, + "TailscaleStatus": { + "1": { + "version": 1, + "hash": "52a7bd4b72f160a8b14466e98bb2b2a70c4f49aaff635e844fa40e955f7d76d2", + "action": "add" + } + }, + "OnDiskBlobDeposit": { + "1": { + "version": 1, + "hash": "5efc230c1ee65c4626d334aa69ed458c796c45265e546a333844c6c2bcd0e6b0", + "action": "add" + } + }, + "SeaweedFSBlobDeposit": { + "1": { + "version": 1, + "hash": "382a9ac178deed2a9591e1ebbb39f265cbe67027fb93a420d473a4c26b7fda11", + "action": "add" + } + }, + "DictStoreConfig": { + "1": { + "version": 1, + "hash": "256e9c623ce0becd555ddd2a55a0c15514e162786b1549388cef98a92a9b18c9", + "action": "add" + } + }, + "NumpyArrayObject": { + "1": { + "version": 1, + "hash": "dcc7b44fa5ad22ae0bc576948f856c172dac1e9de2bc8e2a302e428f3309a278", + "action": "add" + } + }, + "NumpyScalarObject": { + "1": { + "version": 1, + "hash": 
"5c1b6b6e8ba88bc79e76646d621489b889fe8f9b9fd59f117d594be18a409633", + "action": "add" + } + }, + "NumpyBoolObject": { + "1": { + "version": 1, + "hash": "a5c822a6a3ca9eefd6a2b68f7fd0bc614fba7995f6bcc30bdc9dc882296b9b16", + "action": "add" + } + }, + "PandasDataframeObject": { + "1": { + "version": 1, + "hash": "35058924b3de2e0a604a92f91f4dd2e3cc0dac80c219d34f360e7cedd52f5f4c", + "action": "add" + } + }, + "PandasSeriesObject": { + "1": { + "version": 1, + "hash": "2a0d8a55f1c27bd8fccd276cbe01bf272c40cab10417d7027273983fed423caa", + "action": "add" + } + }, + "ReplyNotification": { + "1": { + "version": 1, + "hash": "34b2ad522f7406c2486573467d9c7acef5c1063a0d9f2177c3bda2d8c4f87572", + "action": "add" + } + }, + "Notification": { + "1": { + "version": 1, + "hash": "d13981f721fe2b3e2717640ee07dc716c596e4ecd442461665c3fdab0b85bf0e", + "action": "add" + } + }, + "CreateNotification": { + "1": { + "version": 1, + "hash": "b1f459de374fe674f873a4a5f3fb8a8aabe0d83faad84a933f0a77dd1141159a", + "action": "add" + } + }, + "Change": { + "1": { + "version": 1, + "hash": "aefebd1601cf5bfd4817b0db75300a78299cc4949ead735a90873cbd22c8d4bc", + "action": "add" + } + }, + "ChangeStatus": { + "1": { + "version": 1, + "hash": "627f6f8e42cc285336aa6fd4916285d796140f4ff901487b7cb3907ef0f116a6", + "action": "add" + } + }, + "ActionStoreChange": { + "1": { + "version": 1, + "hash": "17b865e75eb3fb2693924fb00ba87a25260be45d55a4eb2184c4ead22d787cbe", + "action": "add" + } + }, + "Request": { + "1": { + "version": 1, + "hash": "e054307eeb7f13683cde9ce7613d5ca2925a13fff7c345b1c9f729a12c955f90", + "action": "add" + } + }, + "RequestInfo": { + "1": { + "version": 1, + "hash": "b76075c138afc0563ce9ac7f6b1131f048951f7486cd516c02736dc1a2a23639", + "action": "add" + } + }, + "RequestInfoFilter": { + "1": { + "version": 1, + "hash": "7103abdc464ae71bb746410f5730f55dd8ed82268aa32bbb0a69e0070488a669", + "action": "add" + } + }, + "SubmitRequest": { + "1": { + "version": 1, + "hash": 
"96b4ec12beafd9d8a7c97399cb8a23dade4db16d8f521be3fe7b8fec99db5161", + "action": "add" + } + }, + "ObjectMutation": { + "1": { + "version": 1, + "hash": "0ee3dd38d6df0fe9a19d848e8f3aaaf13a6ba86afe3406c239caed6da185651a", + "action": "add" + } + }, + "EnumMutation": { + "1": { + "version": 1, + "hash": "4c02f956ec9b973064972cc57fc8dd9c525e683f93f804642b4e1bfee1b62e57", + "action": "add" + } + }, + "UserCodeStatusChange": { + "1": { + "version": 1, + "hash": "4f5b405cc2b3976ed8f7018df82e873435d9187dff15fa5a23bc85a738969f3f", + "action": "add" + } + }, + "SyftObjectMigrationState": { + "1": { + "version": 1, + "hash": "d3c8126bc15dae4dd243bb035530e3f56cd9e433d403dd6b5f3b45face6d281f", + "action": "add" + } + }, + "ProjectThreadMessage": { + "1": { + "version": 1, + "hash": "1118e935792e8e54103dbf91fa33edbf192a7767d2b1d4526dfa7d4a643cde2e", + "action": "add" + } + }, + "ProjectMessage": { + "1": { + "version": 1, + "hash": "55a3a5171b6949372b4125cc461bf39bc998565e07703804fca6c7ef99695ae4", + "action": "add" + } + }, + "ProjectRequestResponse": { + "1": { + "version": 1, + "hash": "d4c360e845697a0b24695143d0781626cd344cfde43162c90ae90fe67e00ae21", + "action": "add" + } + }, + "ProjectRequest": { + "1": { + "version": 1, + "hash": "514d189df335c68869eea36befcdcafec74bdc682eaf18871fe879e26da4dbb6", + "action": "add" + } + }, + "AnswerProjectPoll": { + "1": { + "version": 1, + "hash": "ff2e1ac7bb764c99d646b96eb3ebfbf9311599b7e3be07aa4a4eb4810bb6dd12", + "action": "add" + } + }, + "ProjectPoll": { + "1": { + "version": 1, + "hash": "b0ac8f1d9c06997374ddbc33fdf1d0af0da15fdb6899f52d91a8574106558964", + "action": "add" + } + }, + "Project": { + "1": { + "version": 1, + "hash": "ec5b7ac1c92808e266f06b175c6ebcd50be81777ad120c02ce8c6074d0004788", + "action": "add" + } + }, + "ProjectSubmit": { + "1": { + "version": 1, + "hash": "0374b37779497d7e0b2ffeabc38d35bfbae2ee762a7674a5a8af75e7c5545e61", + "action": "add" + } + }, + "QueueItem": { + "1": { + "version": 1, + "hash": 
"5aa94681d9d0715d5b605f9625a54e114927271378cf2ea7245f85c488035e0b", + "action": "add" + } + }, + "ZMQClientConfig": { + "1": { + "version": 1, + "hash": "e6054969b495791569caaf33239039beae3d116e1fe74e9575467c48b9007c45", + "action": "add" + } + }, + "SQLiteStoreConfig": { + "1": { + "version": 1, + "hash": "b656b26c14cf4e97aba702dd62a0927aec7f860c12eed512c2c688e1b7109aa5", + "action": "add" + } + }, + "Plan": { + "1": { + "version": 1, + "hash": "a0bba2b7792c9e08c453e9e256f0ac6e6185610726566bcd50b057ae83b42d9a", + "action": "add" + } + } + } + } +} diff --git a/packages/syft/src/syft/service/action/action_graph.py b/packages/syft/src/syft/service/action/action_graph.py index 489e51e91e5..c3e25ac098b 100644 --- a/packages/syft/src/syft/service/action/action_graph.py +++ b/packages/syft/src/syft/service/action/action_graph.py @@ -344,6 +344,8 @@ def _load_from_path(file_path: str) -> None: @serializable() class InMemoryGraphConfig(StoreConfig): + __canonical_name__ = "InMemoryGraphConfig" + store_type: Type[BaseGraphStore] = NetworkXBackingStore client_config: StoreClientConfig = InMemoryStoreClientConfig() locking_config: LockingConfig = ThreadingLockingConfig() @@ -356,6 +358,8 @@ class ActionGraphStore: @serializable() class InMemoryActionGraphStore(ActionGraphStore): + __canonical_name__ = "InMemoryActionGraphStore" + def __init__(self, store_config: StoreConfig, reset: bool = False): self.store_config: StoreConfig = store_config self.graph: Type[BaseGraphStore] = self.store_config.store_type( diff --git a/packages/syft/src/syft/service/metadata/node_metadata.py b/packages/syft/src/syft/service/metadata/node_metadata.py index ee6fa62a49f..748abac2ba1 100644 --- a/packages/syft/src/syft/service/metadata/node_metadata.py +++ b/packages/syft/src/syft/service/metadata/node_metadata.py @@ -9,10 +9,12 @@ # third party from packaging import version from pydantic import BaseModel +from pydantic import root_validator # relative from ...abstract_node import NodeType from 
...node.credentials import SyftVerifyKey +from ...protocol.data_protocol import get_data_protocol from ...serde.serializable import serializable from ...types.syft_object import SYFT_OBJECT_VERSION_1 from ...types.syft_object import SYFT_OBJECT_VERSION_2 @@ -138,21 +140,19 @@ class NodeMetadataJSON(BaseModel, StorableObjectType): show_warnings: bool supported_protocols: List = [] - # breaks Object of type UID is not JSON serializable - # @root_validator(pre=True) - # def add_protocol_versions(cls, values: Dict) -> Dict: - # if "supported_protocols" not in values: - # data_protocol = get_data_protocol() - # values["supported_protocols"] = data_protocol.supported_protocols - # return values + @root_validator(pre=True) + def add_protocol_versions(cls, values: dict) -> dict: + if "supported_protocols" not in values: + data_protocol = get_data_protocol() + values["supported_protocols"] = data_protocol.supported_protocols + return values def check_version(self, client_version: str) -> bool: - return True - # return check_version( - # client_version=client_version, - # server_version=self.syft_version, - # server_name=self.name, - # ) + return check_version( + client_version=client_version, + server_version=self.syft_version, + server_name=self.name, + ) @transform(NodeMetadata, NodeMetadataJSON) diff --git a/packages/syft/src/syft/store/mongo_document_store.py b/packages/syft/src/syft/store/mongo_document_store.py index b41f60ff17b..9a79aac1253 100644 --- a/packages/syft/src/syft/store/mongo_document_store.py +++ b/packages/syft/src/syft/store/mongo_document_store.py @@ -574,6 +574,7 @@ class MongoDocumentStore(DocumentStore): @serializable() class MongoStoreConfig(StoreConfig): + __canonical_name__ = "MongoStoreConfig" """Mongo Store configuration Parameters: diff --git a/packages/syft/tests/syft/stores/store_mocks_test.py b/packages/syft/tests/syft/stores/store_mocks_test.py index bba35d928dd..38a6824cc76 100644 --- a/packages/syft/tests/syft/stores/store_mocks_test.py 
+++ b/packages/syft/tests/syft/stores/store_mocks_test.py @@ -52,17 +52,19 @@ class MockObjectType(SyftObject): @serializable() class MockStore(DocumentStore): + __canonical_name__ = "MockStore" pass @serializable() class MockSyftObject(SyftObject): - __canonical_name__ = UID() + __canonical_name__ = str(UID()) data: Any @serializable() class MockStoreConfig(StoreConfig): + __canonical_name__ = "MockStoreConfig" store_type: Type[DocumentStore] = MockStore db_name: str = "testing" backing_store: Type[KeyValueBackingStore] = MockKeyValueBackingStore From c5e965c9b96e95b8996a8618740f3cac4c8dfc54 Mon Sep 17 00:00:00 2001 From: Shubham Gupta Date: Tue, 3 Oct 2023 11:04:31 +0530 Subject: [PATCH 37/67] fix migrate_arg_kwargs methods - update protocol version - fix settings tests --- packages/syft/src/syft/protocol/data_protocol.py | 8 ++++---- .../syft/src/syft/protocol/protocol_version.json | 2 +- .../tests/syft/settings/settings_service_test.py | 14 +++++++------- 3 files changed, 12 insertions(+), 12 deletions(-) diff --git a/packages/syft/src/syft/protocol/data_protocol.py b/packages/syft/src/syft/protocol/data_protocol.py index e869e8e8478..115d5c554f3 100644 --- a/packages/syft/src/syft/protocol/data_protocol.py +++ b/packages/syft/src/syft/protocol/data_protocol.py @@ -22,7 +22,7 @@ from ..types.syft_object import SyftBaseObject PROTOCOL_STATE_FILENAME = "protocol_version.json" -PROTOCOL_TYPE = str | int +PROTOCOL_TYPE = Union[str, int] def natural_key(key: PROTOCOL_TYPE) -> list[int]: @@ -325,14 +325,14 @@ def migrate_args_and_kwargs( if to_protocol == data_protocol.latest_version: return args, kwargs - object_versions = data_protocol.get_object_versions(protocol=to_protocol) + protocol_state = data_protocol.state migrated_kwargs, migrated_args = {}, [] for param_name, param_val in kwargs.items(): if isinstance(param_val, SyftBaseObject): current_version = int(param_val.__version__) - migrate_to_version = int(max(object_versions[param_val.__canonical_name__])) + 
migrate_to_version = int(max(protocol_state[param_val.__canonical_name__])) if current_version > migrate_to_version: # downgrade versions = range(current_version - 1, migrate_to_version - 1, -1) else: # upgrade @@ -344,7 +344,7 @@ def migrate_args_and_kwargs( for arg in args: if isinstance(arg, SyftBaseObject): current_version = int(arg.__version__) - migrate_to_version = int(max(object_versions[arg.__canonical_name__])) + migrate_to_version = int(max(protocol_state[arg.__canonical_name__])) if current_version > migrate_to_version: # downgrade versions = range(current_version - 1, migrate_to_version - 1, -1) else: # upgrade diff --git a/packages/syft/src/syft/protocol/protocol_version.json b/packages/syft/src/syft/protocol/protocol_version.json index ce4cb4303d3..8e9c5c70952 100644 --- a/packages/syft/src/syft/protocol/protocol_version.json +++ b/packages/syft/src/syft/protocol/protocol_version.json @@ -93,7 +93,7 @@ "SyftAPI": { "1": { "version": 1, - "hash": "bde651f293314585412aede9c86e9703e1f0b6b45b8d9143d831573087e4e078", + "hash": "2bba1d9fcf677a58e35bf903de3da22ee4913af138aa3012af9c46b3609579cd", "action": "add" } }, diff --git a/packages/syft/tests/syft/settings/settings_service_test.py b/packages/syft/tests/syft/settings/settings_service_test.py index ba1aac7b0f7..a8ba4d0da68 100644 --- a/packages/syft/tests/syft/settings/settings_service_test.py +++ b/packages/syft/tests/syft/settings/settings_service_test.py @@ -14,7 +14,7 @@ from syft.node.credentials import SyftSigningKey from syft.node.credentials import SyftVerifyKey from syft.service.context import AuthedServiceContext -from syft.service.metadata.node_metadata import NodeMetadataV1 +from syft.service.metadata.node_metadata import NodeMetadata from syft.service.response import SyftError from syft.service.response import SyftSuccess from syft.service.settings.settings import NodeSettings @@ -227,11 +227,11 @@ def test_settings_allow_guest_registration( # Create a new worker verify_key = 
SyftSigningKey.generate().verify_key - mock_node_metadata = NodeMetadataV1( + mock_node_metadata = NodeMetadata( name=faker.name(), verify_key=verify_key, - highest_object_version=1, - lowest_object_version=2, + highest_version=1, + lowest_version=2, syft_version=syft.__version__, signup_enabled=False, admin_email="info@openmined.org", @@ -309,11 +309,11 @@ def get_mock_client(faker, root_client, role): return guest_client verify_key = SyftSigningKey.generate().verify_key - mock_node_metadata = NodeMetadataV1( + mock_node_metadata = NodeMetadata( name=faker.name(), verify_key=verify_key, - highest_object_version=1, - lowest_object_version=2, + highest_version=1, + lowest_version=2, syft_version=syft.__version__, signup_enabled=False, admin_email="info@openmined.org", From 496395c910bb0a7d36d6b441a523527398ceb637 Mon Sep 17 00:00:00 2001 From: Shubham Gupta Date: Tue, 3 Oct 2023 11:21:54 +0530 Subject: [PATCH 38/67] bump version and save --- packages/syft/src/syft/protocol/protocol_version.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/syft/src/syft/protocol/protocol_version.json b/packages/syft/src/syft/protocol/protocol_version.json index 8e9c5c70952..e28df4357f8 100644 --- a/packages/syft/src/syft/protocol/protocol_version.json +++ b/packages/syft/src/syft/protocol/protocol_version.json @@ -1,5 +1,5 @@ { - "dev": { + "1": { "object_versions": { "PartialSyftObject": { "1": { From b2b98924afc4bd1f94e8ce65c6283f3b91ec28ce Mon Sep 17 00:00:00 2001 From: Shubham Gupta Date: Tue, 3 Oct 2023 16:26:12 +0530 Subject: [PATCH 39/67] fix calculation of supported versions - fix calcuation of state based for given protocol in migrate args and kwargs - serialize ActionObject --- packages/syft/src/syft/client/api.py | 6 ++++- .../syft/src/syft/protocol/data_protocol.py | 22 +++++++------------ .../src/syft/service/action/action_object.py | 1 + 3 files changed, 14 insertions(+), 15 deletions(-) diff --git a/packages/syft/src/syft/client/api.py 
b/packages/syft/src/syft/client/api.py index 1e49eb47597..90636045ded 100644 --- a/packages/syft/src/syft/client/api.py +++ b/packages/syft/src/syft/client/api.py @@ -431,7 +431,11 @@ def unwrap_and_migrate_annotation(annotation, object_versions): args = get_args(annotation) if len(args) == 0: print(annotation) - if isinstance(annotation, type) and issubclass(annotation, SyftBaseObject): + if ( + isinstance(annotation, type) + and issubclass(annotation, SyftBaseObject) + and annotation.__canonical_name__ in object_versions + ): downgrade_to_version = int( max(object_versions[annotation.__canonical_name__]) ) diff --git a/packages/syft/src/syft/protocol/data_protocol.py b/packages/syft/src/syft/protocol/data_protocol.py index 115d5c554f3..65f56e4f629 100644 --- a/packages/syft/src/syft/protocol/data_protocol.py +++ b/packages/syft/src/syft/protocol/data_protocol.py @@ -83,7 +83,7 @@ def save_history(self, history: dict) -> None: def latest_version(self) -> PROTOCOL_TYPE: sorted_versions = sorted(self.protocol_history.keys(), key=natural_key) if len(sorted_versions) > 0: - return sorted_versions[-1] + return int(sorted_versions[-1]) return "dev" @staticmethod @@ -94,9 +94,6 @@ def build_state(self, stop_key: Optional[str] = None) -> dict: sorted_dict = sort_dict_naturally(self.protocol_history) state_dict = defaultdict(dict) for k, _v in sorted_dict.items(): - # stop early - if stop_key == k: - return state_dict object_versions = sorted_dict[k]["object_versions"] for canonical_name, versions in object_versions.items(): for version, object_metadata in versions.items(): @@ -122,6 +119,9 @@ def build_state(self, stop_key: Optional[str] = None) -> dict: state_dict[canonical_name][str(version)] = hash_str elif action == "remove": del state_dict[canonical_name][str(version)] + # stop early + if stop_key == k: + return state_dict return state_dict def diff_state(self, state: dict) -> tuple[dict, dict]: @@ -258,23 +258,17 @@ def supported_protocols(self) -> list[Union[int, 
str]]: supported.append(version) return supported - def calculate_supported_protocols(self) -> None: + def calculate_supported_protocols(self) -> dict: protocol_supported = {} # go through each historical protocol version for v, version_data in self.protocol_history.items(): # we assume its supported until we prove otherwise protocol_supported[v] = True # iterate through each object - for canonical_name, versions in version_data["object_versions"].items(): + for canonical_name, _ in version_data["object_versions"].items(): if canonical_name not in self.state: protocol_supported[v] = False break - # does the current source code state support this object - protocol_history_highest = int(max(versions)) - state_highest = int(max(self.state[canonical_name])) - if protocol_history_highest != state_highest: - protocol_supported[v] = False - break return protocol_supported def get_object_versions(self, protocol: Union[int, str]) -> list: @@ -304,7 +298,7 @@ def bump_protocol_version() -> Result[SyftSuccess, SyftError]: def migrate_args_and_kwargs( args: Tuple, kwargs: Dict, - to_protocol: Optional[int] = None, + to_protocol: Optional[PROTOCOL_TYPE] = None, to_latest_protocol: bool = False, ) -> Tuple[Tuple, Dict]: """Migrate args and kwargs to latest version for given protocol. 
@@ -325,7 +319,7 @@ def migrate_args_and_kwargs( if to_protocol == data_protocol.latest_version: return args, kwargs - protocol_state = data_protocol.state + protocol_state = data_protocol.build_state(stop_key=to_protocol) migrated_kwargs, migrated_args = {}, [] diff --git a/packages/syft/src/syft/service/action/action_object.py b/packages/syft/src/syft/service/action/action_object.py index 9aaa5f56a58..f713bb69c9a 100644 --- a/packages/syft/src/syft/service/action/action_object.py +++ b/packages/syft/src/syft/service/action/action_object.py @@ -449,6 +449,7 @@ def debox_args_and_kwargs(args: Any, kwargs: Any) -> Tuple[Any, Any]: ] +@serializable() class ActionObject(SyftObject): """Action object for remote execution.""" From 2ab2d36001c71d2ae8885e9279a7241712eeca44 Mon Sep 17 00:00:00 2001 From: Shubham Gupta Date: Tue, 3 Oct 2023 16:48:27 +0530 Subject: [PATCH 40/67] type cast protocol version to string in build state --- packages/syft/src/syft/protocol/data_protocol.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/syft/src/syft/protocol/data_protocol.py b/packages/syft/src/syft/protocol/data_protocol.py index 65f56e4f629..6486fbb9547 100644 --- a/packages/syft/src/syft/protocol/data_protocol.py +++ b/packages/syft/src/syft/protocol/data_protocol.py @@ -319,7 +319,7 @@ def migrate_args_and_kwargs( if to_protocol == data_protocol.latest_version: return args, kwargs - protocol_state = data_protocol.build_state(stop_key=to_protocol) + protocol_state = data_protocol.build_state(stop_key=str(to_protocol)) migrated_kwargs, migrated_args = {}, [] From 630b8c882b3b8e4f456c31b49cfc8e06b9055bb2 Mon Sep 17 00:00:00 2001 From: Shubham Gupta Date: Tue, 3 Oct 2023 17:23:30 +0530 Subject: [PATCH 41/67] fix annotation for communication type - handle dev while calculating communication protocol --- packages/syft/src/syft/client/api.py | 2 +- packages/syft/src/syft/client/client.py | 6 +++++- packages/syft/src/syft/node/node.py | 3 ++- 
packages/syft/src/syft/node/routes.py | 5 +++-- packages/syft/src/syft/protocol/data_protocol.py | 2 +- packages/syft/src/syft/protocol/protocol_state.json | 1 - 6 files changed, 12 insertions(+), 7 deletions(-) delete mode 100644 packages/syft/src/syft/protocol/protocol_state.json diff --git a/packages/syft/src/syft/client/api.py b/packages/syft/src/syft/client/api.py index 90636045ded..6ed1a5d5501 100644 --- a/packages/syft/src/syft/client/api.py +++ b/packages/syft/src/syft/client/api.py @@ -490,7 +490,7 @@ class SyftAPI(SyftObject): @staticmethod def for_user( node: AbstractNode, - communication_protocol: int, + communication_protocol: PROTOCOL_TYPE, user_verify_key: Optional[SyftVerifyKey] = None, ) -> SyftAPI: # relative diff --git a/packages/syft/src/syft/client/client.py b/packages/syft/src/syft/client/client.py index 59f7966742a..f669ef66652 100644 --- a/packages/syft/src/syft/client/client.py +++ b/packages/syft/src/syft/client/client.py @@ -481,7 +481,9 @@ def post_init(self) -> None: self.metadata.supported_protocols ) - def _get_communication_protocol(self, protocols_supported_by_server: List) -> int: + def _get_communication_protocol( + self, protocols_supported_by_server: List + ) -> Union[int, str]: data_protocol: DataProtocol = get_data_protocol() protocols_supported_by_client: List[ PROTOCOL_TYPE @@ -497,6 +499,8 @@ def _get_communication_protocol(self, protocols_supported_by_server: List) -> in "No common communication protocol found between the client and the server." 
) + if "dev" in common_protocols: + return "dev" return max(common_protocols) def create_project( diff --git a/packages/syft/src/syft/node/node.py b/packages/syft/src/syft/node/node.py index 4e1f5025cfc..16d25811125 100644 --- a/packages/syft/src/syft/node/node.py +++ b/packages/syft/src/syft/node/node.py @@ -42,6 +42,7 @@ from ..client.api import SyftAPIData from ..client.api import debox_signed_syftapicall_response from ..external import OBLV +from ..protocol.data_protocol import PROTOCOL_TYPE from ..protocol.data_protocol import get_data_protocol from ..serde.deserialize import _deserialize from ..serde.serialize import _serialize @@ -849,7 +850,7 @@ def handle_api_call_with_unsigned_result( def get_api( self, for_user: Optional[SyftVerifyKey] = None, - communication_protocol: Optional[int] = None, + communication_protocol: Optional[PROTOCOL_TYPE] = None, ) -> SyftAPI: return SyftAPI.for_user( node=self, diff --git a/packages/syft/src/syft/node/routes.py b/packages/syft/src/syft/node/routes.py index 9bd90cb07c7..b9f7ffc396d 100644 --- a/packages/syft/src/syft/node/routes.py +++ b/packages/syft/src/syft/node/routes.py @@ -14,6 +14,7 @@ # relative from ..abstract_node import AbstractNode +from ..protocol.data_protocol import PROTOCOL_TYPE from ..serde.deserialize import _deserialize as deserialize from ..serde.serialize import _serialize as serialize from ..service.context import NodeServiceContext @@ -69,7 +70,7 @@ def syft_metadata_capnp() -> Response: ) def handle_syft_new_api( - user_verify_key: SyftVerifyKey, communication_protocol: int + user_verify_key: SyftVerifyKey, communication_protocol: PROTOCOL_TYPE ) -> Response: return Response( serialize( @@ -81,7 +82,7 @@ def handle_syft_new_api( # get the SyftAPI object @router.get("/api") def syft_new_api( - request: Request, verify_key: str, communication_protocol: int + request: Request, verify_key: str, communication_protocol: PROTOCOL_TYPE ) -> Response: user_verify_key: SyftVerifyKey = 
SyftVerifyKey.from_string(verify_key) if TRACE_MODE: diff --git a/packages/syft/src/syft/protocol/data_protocol.py b/packages/syft/src/syft/protocol/data_protocol.py index 6486fbb9547..831059db77a 100644 --- a/packages/syft/src/syft/protocol/data_protocol.py +++ b/packages/syft/src/syft/protocol/data_protocol.py @@ -83,7 +83,7 @@ def save_history(self, history: dict) -> None: def latest_version(self) -> PROTOCOL_TYPE: sorted_versions = sorted(self.protocol_history.keys(), key=natural_key) if len(sorted_versions) > 0: - return int(sorted_versions[-1]) + return sorted_versions[-1] if self.has_dev else int(sorted_versions[-1]) return "dev" @staticmethod diff --git a/packages/syft/src/syft/protocol/protocol_state.json b/packages/syft/src/syft/protocol/protocol_state.json deleted file mode 100644 index 0967ef424bc..00000000000 --- a/packages/syft/src/syft/protocol/protocol_state.json +++ /dev/null @@ -1 +0,0 @@ -{} From c9bb48c506e498e6a8f86a0f7976f3bb92e2520a Mon Sep 17 00:00:00 2001 From: Shubham Gupta Date: Tue, 3 Oct 2023 17:48:06 +0530 Subject: [PATCH 42/67] downgrade signature if current protocol is dev but communication protocol is not --- packages/syft/src/syft/client/api.py | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/packages/syft/src/syft/client/api.py b/packages/syft/src/syft/client/api.py index 6ed1a5d5501..264192f9088 100644 --- a/packages/syft/src/syft/client/api.py +++ b/packages/syft/src/syft/client/api.py @@ -510,9 +510,13 @@ def for_user( # If server uses a higher protocol version than client, then # signatures needs to be downgraded. 
- signature_needs_downgrade = node.current_protocol != "dev" and int( - node.current_protocol - ) > int(communication_protocol) + if node.current_protocol == "dev" and communication_protocol != "dev": + # We assume dev is the highest staged protocol + signature_needs_downgrade = True + else: + signature_needs_downgrade = node.current_protocol != "dev" and int( + node.current_protocol + ) > int(communication_protocol) data_protocol = get_data_protocol() if signature_needs_downgrade: From 14259ccb4ee4fbbdf0f4098b7fdbece61cc46907 Mon Sep 17 00:00:00 2001 From: Shubham Gupta Date: Tue, 3 Oct 2023 18:50:56 +0530 Subject: [PATCH 43/67] fix transform for nodemetadatajson nodemetadata version 2 --- packages/syft/src/syft/protocol/protocol_version.json | 9 ++++++++- packages/syft/src/syft/service/metadata/node_metadata.py | 4 ++++ 2 files changed, 12 insertions(+), 1 deletion(-) diff --git a/packages/syft/src/syft/protocol/protocol_version.json b/packages/syft/src/syft/protocol/protocol_version.json index e28df4357f8..161969ae55c 100644 --- a/packages/syft/src/syft/protocol/protocol_version.json +++ b/packages/syft/src/syft/protocol/protocol_version.json @@ -1,5 +1,5 @@ { - "1": { + "dev": { "object_versions": { "PartialSyftObject": { "1": { @@ -377,6 +377,13 @@ "action": "add" } }, + "ActionObject": { + "1": { + "version": 1, + "hash": "632446f1415102490c93fafb56dd9eb29d79623bcc5e9f2e6e37c4f63c2c51c3", + "action": "add" + } + }, "AnyActionObject": { "1": { "version": 1, diff --git a/packages/syft/src/syft/service/metadata/node_metadata.py b/packages/syft/src/syft/service/metadata/node_metadata.py index 748abac2ba1..a3b3922b36e 100644 --- a/packages/syft/src/syft/service/metadata/node_metadata.py +++ b/packages/syft/src/syft/service/metadata/node_metadata.py @@ -161,6 +161,8 @@ def metadata_to_json() -> List[Callable]: drop(["__canonical_name__"]), rename("__version__", "metadata_version"), convert_types(["id", "verify_key", "node_type"], str), + rename("highest_version", 
"highest_object_version"), + rename("lowest_version", "lowest_object_version"), ] @@ -170,4 +172,6 @@ def json_to_metadata() -> List[Callable]: drop(["metadata_version", "supported_protocols"]), convert_types(["id", "verify_key"], [UID, SyftVerifyKey]), convert_types(["node_type"], NodeType), + rename("highest_object_version", "highest_version"), + rename("lowest_object_version", "lowest_version"), ] From 1cc15ddaca0c34037e0999d3801d5c471cd54cd5 Mon Sep 17 00:00:00 2001 From: Shubham Gupta Date: Tue, 3 Oct 2023 19:01:15 +0530 Subject: [PATCH 44/67] fix incorrect import --- packages/syft/src/syft/types/syft_object.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/syft/src/syft/types/syft_object.py b/packages/syft/src/syft/types/syft_object.py index 0017832be15..4bba57a6196 100644 --- a/packages/syft/src/syft/types/syft_object.py +++ b/packages/syft/src/syft/types/syft_object.py @@ -30,8 +30,8 @@ # relative from ..node.credentials import SyftVerifyKey -from ..serde import serialize from ..serde.recursive_primitives import recursive_serde_register_type +from ..serde.serialize import _serialize as serialize from ..util.autoreload import autoreload_enabled from ..util.markdown import as_markdown_python_code from ..util.notebook_ui.notebook_addons import create_table_template From 02daad1feeafb06de3b7b3f38e0c697823b862b1 Mon Sep 17 00:00:00 2001 From: Shubham Gupta Date: Wed, 4 Oct 2023 15:20:16 +0530 Subject: [PATCH 45/67] fix method for calc pending migrations --- packages/syft/src/syft/node/node.py | 32 ++++++++++++++----- .../object_search/migration_state_service.py | 2 +- .../object_search/object_migration_state.py | 3 +- 3 files changed, 26 insertions(+), 11 deletions(-) diff --git a/packages/syft/src/syft/node/node.py b/packages/syft/src/syft/node/node.py index 16d25811125..8e1505bf2fa 100644 --- a/packages/syft/src/syft/node/node.py +++ b/packages/syft/src/syft/node/node.py @@ -285,6 +285,7 @@ def __init__( CodeHistoryService, 
MetadataService, BlobStorageService, + MigrateStateService, ] if services is None else services @@ -453,22 +454,35 @@ def root_client(self): root_client.api.refresh_api_callback() return root_client - def _validate_data_migration_state(self): + def _find_pending_migrations(self): partition_to_be_migrated = [] + + context = AuthedServiceContext( + node=self, + credentials=self.verify_key, + role=ServiceRole.ADMIN, + ) migration_state_service = self.get_service(MigrateStateService) - for partition_settings in self.document_store.partitions.values(): - object_type = partition_settings.object_type + + for partition in self.document_store.partitions.values(): + object_type = partition.settings.object_type canonical_name = object_type.__canonical_name__ - migration_state = migration_state_service.get_state(canonical_name) - if migration_state is not None: - if migration_state.current_version != migration_state.latest_version: - partition_to_be_migrated.append(canonical_name) + + migration_state = migration_state_service.get_state(context, canonical_name) + if ( + migration_state is not None + and migration_state.current_version != migration_state.latest_version + ): + partition_to_be_migrated.append(canonical_name) else: - migration_state.register_migration_state( + migration_state_service.register_migration_state( + context, current_version=object_type.__version__, canonical_name=canonical_name, ) + return partition_to_be_migrated + @property def guest_client(self): return self.get_guest_client() @@ -608,6 +622,7 @@ def _construct_services(self): CodeHistoryService, MetadataService, BlobStorageService, + MigrateStateService, ] if OBLV: @@ -617,6 +632,7 @@ def _construct_services(self): store_services += [OblvService] if service_klass in store_services: + print("Service class", service_klass) kwargs["store"] = self.document_store self.service_path_map[service_klass.__name__.lower()] = service_klass( **kwargs diff --git 
a/packages/syft/src/syft/service/object_search/migration_state_service.py b/packages/syft/src/syft/service/object_search/migration_state_service.py index 2adbd1478bd..fefb28dc60c 100644 --- a/packages/syft/src/syft/service/object_search/migration_state_service.py +++ b/packages/syft/src/syft/service/object_search/migration_state_service.py @@ -19,7 +19,7 @@ class MigrateStateService(AbstractService): def __init__(self, store: DocumentStore) -> None: self.store = store - self.stash: SyftMigrationStateStash = SyftMigrationStateStash(store=store) + self.stash = SyftMigrationStateStash(store=store) @service_method(path="migration", name="get_version") def get_version( diff --git a/packages/syft/src/syft/service/object_search/object_migration_state.py b/packages/syft/src/syft/service/object_search/object_migration_state.py index aab7749bea5..67ed85b803f 100644 --- a/packages/syft/src/syft/service/object_search/object_migration_state.py +++ b/packages/syft/src/syft/service/object_search/object_migration_state.py @@ -16,7 +16,6 @@ from ...types.syft_object import SyftMigrationRegistry from ...types.syft_object import SyftObject from ..action.action_permissions import ActionObjectPermission -from ..user.user import User @serializable() @@ -50,7 +49,7 @@ def supported_versions(self) -> List: class SyftMigrationStateStash(BaseStash): object_type = SyftObjectMigrationState settings: PartitionSettings = PartitionSettings( - name=User.__canonical_name__, + name=SyftObjectMigrationState.__canonical_name__, object_type=SyftObjectMigrationState, ) From ea626b8415160f0609325886ea7de6d3ac0cd970 Mon Sep 17 00:00:00 2001 From: Shubham Gupta Date: Wed, 4 Oct 2023 17:32:38 +0530 Subject: [PATCH 46/67] move version register to a class method in SyftMigrationRegistry --- packages/syft/src/syft/types/syft_object.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/packages/syft/src/syft/types/syft_object.py b/packages/syft/src/syft/types/syft_object.py index 
4bba57a6196..9d0d3511baf 100644 --- a/packages/syft/src/syft/types/syft_object.py +++ b/packages/syft/src/syft/types/syft_object.py @@ -194,11 +194,14 @@ def __init_subclass__(cls, **kwargs: Any) -> None: """ super().__init_subclass__(**kwargs) klass = type(cls) if not isinstance(cls, type) else cls + cls.register_version(klass=klass) + @classmethod + def register_version(cls, klass: type): if hasattr(klass, "__canonical_name__") and hasattr(klass, "__version__"): mapping_string = klass.__canonical_name__ - klass_version = cls.__version__ - fqn = f"{cls.__module__}.{cls.__name__}" + klass_version = klass.__version__ + fqn = f"{klass.__module__}.{klass.__name__}" if ( mapping_string in cls.__migration_version_registry__ From 712eea8ef91a06f0aebe2909e4e06eb3dc0f1301 Mon Sep 17 00:00:00 2001 From: Shubham Gupta Date: Thu, 5 Oct 2023 10:33:46 +0530 Subject: [PATCH 47/67] add serde decorator to SyftObjectMigrationState --- .../src/syft/service/object_search/object_migration_state.py | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/syft/src/syft/service/object_search/object_migration_state.py b/packages/syft/src/syft/service/object_search/object_migration_state.py index 67ed85b803f..686c1ccb8fd 100644 --- a/packages/syft/src/syft/service/object_search/object_migration_state.py +++ b/packages/syft/src/syft/service/object_search/object_migration_state.py @@ -46,6 +46,7 @@ def supported_versions(self) -> List: KlassNamePartitionKey = PartitionKey(key="canonical_name", type_=str) +@serializable() class SyftMigrationStateStash(BaseStash): object_type = SyftObjectMigrationState settings: PartitionSettings = PartitionSettings( From abdbcbaba4af5d6e9af70bf71d16059be8462ec5 Mon Sep 17 00:00:00 2001 From: Shubham Gupta Date: Thu, 5 Oct 2023 16:23:30 +0530 Subject: [PATCH 48/67] Added a test for client and server using different communication protocol - handle iterables and result type objects during migrating arg/kwargs - remove print statements --- 
packages/syft/src/syft/client/api.py | 1 - packages/syft/src/syft/node/node.py | 1 - .../syft/src/syft/protocol/data_protocol.py | 75 ++++-- .../migrations/protocol_communication_test.py | 229 ++++++++++++++++++ 4 files changed, 283 insertions(+), 23 deletions(-) create mode 100644 packages/syft/tests/syft/migrations/protocol_communication_test.py diff --git a/packages/syft/src/syft/client/api.py b/packages/syft/src/syft/client/api.py index 62cdea59bf4..dab5f2f0e2a 100644 --- a/packages/syft/src/syft/client/api.py +++ b/packages/syft/src/syft/client/api.py @@ -427,7 +427,6 @@ def downgrade_signature(signature: Signature, object_versions: Dict): def unwrap_and_migrate_annotation(annotation, object_versions): args = get_args(annotation) if len(args) == 0: - print(annotation) if ( isinstance(annotation, type) and issubclass(annotation, SyftBaseObject) diff --git a/packages/syft/src/syft/node/node.py b/packages/syft/src/syft/node/node.py index 8e1505bf2fa..602aeaeb2a9 100644 --- a/packages/syft/src/syft/node/node.py +++ b/packages/syft/src/syft/node/node.py @@ -632,7 +632,6 @@ def _construct_services(self): store_services += [OblvService] if service_klass in store_services: - print("Service class", service_klass) kwargs["store"] = self.document_store self.service_path_map[service_klass.__name__.lower()] = service_klass( **kwargs diff --git a/packages/syft/src/syft/protocol/data_protocol.py b/packages/syft/src/syft/protocol/data_protocol.py index 831059db77a..cdc688df946 100644 --- a/packages/syft/src/syft/protocol/data_protocol.py +++ b/packages/syft/src/syft/protocol/data_protocol.py @@ -5,6 +5,7 @@ import os from pathlib import Path import re +from typing import Any from typing import Dict from typing import Optional from typing import Tuple @@ -12,6 +13,7 @@ from typing import Union # third party +from result import OkErr from result import Result # relative @@ -295,6 +297,48 @@ def bump_protocol_version() -> Result[SyftSuccess, SyftError]: return 
data_protocol.bump_protocol_version() +def debox_arg_and_migrate(arg: Any, protocol_state: dict): + """Debox the argument based on whether it is iterable or single entity.""" + box_to_result_type = None + + if type(arg) in OkErr: + box_to_result_type = type(arg) + arg = arg.value + + single_entity = False + is_tuple = isinstance(arg, tuple) + + if isinstance(arg, (list, tuple)): + iterable_keys = range(len(arg)) + arg = list(arg) + elif isinstance(arg, dict): + iterable_keys = arg.keys() + else: + iterable_keys = range(1) + arg = [arg] + single_entity = True + + for key in iterable_keys: + _object = arg[key] + if isinstance(_object, SyftBaseObject): + current_version = int(_object.__version__) + migrate_to_version = int(max(protocol_state[_object.__canonical_name__])) + if current_version > migrate_to_version: # downgrade + versions = range(current_version - 1, migrate_to_version - 1, -1) + else: # upgrade + versions = range(current_version + 1, migrate_to_version + 1) + for version in versions: + _object = _object.migrate_to(version) + arg[key] = _object + + wrapped_arg = arg[0] if single_entity else arg + wrapped_arg = tuple(wrapped_arg) if is_tuple else wrapped_arg + if box_to_result_type is not None: + wrapped_arg = box_to_result_type(wrapped_arg) + + return wrapped_arg + + def migrate_args_and_kwargs( args: Tuple, kwargs: Dict, @@ -324,28 +368,17 @@ def migrate_args_and_kwargs( migrated_kwargs, migrated_args = {}, [] for param_name, param_val in kwargs.items(): - if isinstance(param_val, SyftBaseObject): - current_version = int(param_val.__version__) - migrate_to_version = int(max(protocol_state[param_val.__canonical_name__])) - if current_version > migrate_to_version: # downgrade - versions = range(current_version - 1, migrate_to_version - 1, -1) - else: # upgrade - versions = range(current_version + 1, migrate_to_version + 1) - for version in versions: - param_val = param_val.migrate_to(version) - migrated_kwargs[param_name] = param_val + migrated_val = 
debox_arg_and_migrate( + arg=param_val, + protocol_state=protocol_state, + ) + migrated_kwargs[param_name] = migrated_val for arg in args: - if isinstance(arg, SyftBaseObject): - current_version = int(arg.__version__) - migrate_to_version = int(max(protocol_state[arg.__canonical_name__])) - if current_version > migrate_to_version: # downgrade - versions = range(current_version - 1, migrate_to_version - 1, -1) - else: # upgrade - versions = range(current_version + 1, migrate_to_version + 1) - for version in versions: - arg = arg.migrate_to(version) - - migrated_args.append(arg) + migrated_val = debox_arg_and_migrate( + arg=arg, + protocol_state=protocol_state, + ) + migrated_args.append(migrated_val) return tuple(migrated_args), migrated_kwargs diff --git a/packages/syft/tests/syft/migrations/protocol_communication_test.py b/packages/syft/tests/syft/migrations/protocol_communication_test.py new file mode 100644 index 00000000000..ce6966a4ac1 --- /dev/null +++ b/packages/syft/tests/syft/migrations/protocol_communication_test.py @@ -0,0 +1,229 @@ +# stdlib +from typing import List +from typing import Type +from typing import Union +from unittest import mock + +# third party +import pytest + +# syft absolute +import syft as sy +from syft.node.worker import Worker +from syft.protocol.data_protocol import get_data_protocol +from syft.serde.serializable import serializable +from syft.service.context import AuthedServiceContext +from syft.service.response import SyftError +from syft.service.service import AbstractService +from syft.service.service import service_method +from syft.service.user.user_roles import GUEST_ROLE_LEVEL +from syft.store.document_store import BaseStash +from syft.store.document_store import DocumentStore +from syft.store.document_store import PartitionSettings +from syft.types.syft_migration import migrate +from syft.types.syft_object import SYFT_OBJECT_VERSION_1 +from syft.types.syft_object import SYFT_OBJECT_VERSION_2 +from syft.types.syft_object 
import SyftBaseObject +from syft.types.syft_object import SyftObject +from syft.types.transforms import convert_types +from syft.types.transforms import rename +from syft.types.uid import UID +from syft.util.util import index_syft_by_module_name + + +def get_klass_version_1(): + @serializable() + class SyftMockObjectTestV1(SyftObject): + __canonical_name__ = "SyftMockObjectTest" + __version__ = SYFT_OBJECT_VERSION_1 + + id: UID + name: str + version: int + + return SyftMockObjectTestV1 + + +def get_klass_version_2(): + @serializable() + class SyftMockObjectTestV2(SyftObject): + __canonical_name__ = "SyftMockObjectTest" + __version__ = SYFT_OBJECT_VERSION_2 + + id: UID + full_name: str + version: str + + return SyftMockObjectTestV2 + + +def setup_migration_transforms(mock_klass_v1, mock_klass_v2): + @migrate(mock_klass_v1, mock_klass_v2) + def mock_v1_to_v2(): + return [rename("name", "full_name"), convert_types(["version"], str)] + + @migrate(mock_klass_v2, mock_klass_v1) + def mock_v2_to_v1(): + return [rename("full_name", "name"), convert_types(["version"], int)] + + return mock_v1_to_v2, mock_v2_to_v1 + + +def get_stash_klass(syft_object: Type[SyftBaseObject]): + class SyftMockObjectStash(BaseStash): + object_type = syft_object + settings: PartitionSettings = PartitionSettings( + name=object_type.__canonical_name__, + object_type=syft_object, + ) + + def __init__(self, store: DocumentStore) -> None: + super().__init__(store=store) + + return SyftMockObjectStash + + +def setup_service_method(syft_object): + stash_klass: BaseStash = get_stash_klass(syft_object=syft_object) + + @serializable() + class SyftMockObjectService(AbstractService): + store: DocumentStore + stash: stash_klass + + def __init__(self, store: DocumentStore) -> None: + self.store = store + self.stash = stash_klass(store=store) + + @service_method( + path="dummy.syft_object", + name="get", + roles=GUEST_ROLE_LEVEL, + ) + def get( + self, context: AuthedServiceContext + ) -> 
Union[List[syft_object], SyftError]: + result = self.stash.get_all(context.credentials, has_permission=True) + if result.is_ok(): + return result.ok() + return SyftError(message=f"{result.err()}") + + return SyftMockObjectService + + +def setup_version_one(node_name: str): + syft_klass_version_one = get_klass_version_1() + + sy.stage_protocol_changes() + sy.bump_protocol_version() + + syft_service_klass = setup_service_method( + syft_object=syft_klass_version_one, + ) + + node = sy.orchestra.launch(node_name, dev_mode=True, reset=True) + + worker: Worker = node.python_node + + worker.services.append(syft_service_klass) + worker.service_path_map[syft_service_klass.__name__.lower()] = syft_service_klass( + store=worker.document_store + ) + + return node, syft_klass_version_one + + +def setup_version_second(node_name: str, klass_version_one: type): + syft_klass_version_second = get_klass_version_2() + setup_migration_transforms(klass_version_one, syft_klass_version_second) + + sy.stage_protocol_changes() + sy.bump_protocol_version() + + syft_service_klass = setup_service_method(syft_object=syft_klass_version_second) + + node = sy.orchestra.launch(node_name, dev_mode=True) + + worker: Worker = node.python_node + + worker.services.append(syft_service_klass) + worker.service_path_map[syft_service_klass.__name__.lower()] = syft_service_klass( + store=worker.document_store + ) + + return node, syft_klass_version_second + + +@pytest.fixture +def dp_fixture(): + dp = get_data_protocol() + yield + dp.save_history(dp.protocol_history) + + +def test_client_server_running_different_protocols(dp_fixture): + node_name = UID().to_string() + + # Setup mock object version one + nh1, klass_v1 = setup_version_one(node_name) + assert klass_v1.__canonical_name__ == "SyftMockObjectTest" + assert klass_v1.__name__ == "SyftMockObjectTestV1" + + nh1_client = nh1.client + assert nh1_client is not None + result_from_client_1 = nh1_client.api.services.dummy.get() + + 
protocol_version_with_mock_obj_v1 = get_data_protocol().latest_version + + # No data saved + assert len(result_from_client_1) == 0 + + # Setup mock object version second + nh2, klass_v2 = setup_version_second(node_name, klass_version_one=klass_v1) + + # Create a sample data in version second + sample_data = klass_v2(full_name="John", version=str(1), id=UID()) + + assert isinstance(sample_data, klass_v2) + + # Validate migrations + sample_data_v1 = sample_data.migrate_to(version=protocol_version_with_mock_obj_v1) + assert sample_data_v1.name == sample_data.full_name + assert sample_data_v1.version == int(sample_data.version) + + # Set the sample data in version second + service_klass = nh1.python_node.get_service("SyftMockObjectService") + service_klass.stash.set( + nh1.python_node.root_client.verify_key, + sample_data, + ) + + # patch the index syft module function + def patched_index_syft_by_module_name(fully_qualified_name: str) -> object: + if klass_v1.__name__ in fully_qualified_name: + return klass_v1 + elif klass_v2.__name__ in fully_qualified_name: + return klass_v2 + + return index_syft_by_module_name(fully_qualified_name) + + with mock.patch( + "syft.client.api.index_syft_by_module_name", + patched_index_syft_by_module_name, + ): + nh2_client = nh2.client + assert nh2_client is not None + # Force communication protocol to when version object is defined + nh2_client.communication_protocol = protocol_version_with_mock_obj_v1 + # Reset api + nh2_client._api = None + + # Call the API with an older communication protocol version + result2 = nh2_client.api.services.dummy.get() + assert isinstance(result2, list) + + # Validate the data received + for data in result2: + assert isinstance(data, klass_v1) + assert data.name == sample_data.full_name + assert data.version == int(sample_data.version) From 9236570f424b340f5f78a52b82dbf1c4a0df8785 Mon Sep 17 00:00:00 2001 From: Shubham Gupta Date: Fri, 6 Oct 2023 10:25:49 +0530 Subject: [PATCH 49/67] update object 
versioning state --- packages/syft/src/syft/protocol/protocol_version.json | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/packages/syft/src/syft/protocol/protocol_version.json b/packages/syft/src/syft/protocol/protocol_version.json index 161969ae55c..863fec82e95 100644 --- a/packages/syft/src/syft/protocol/protocol_version.json +++ b/packages/syft/src/syft/protocol/protocol_version.json @@ -324,21 +324,21 @@ "Asset": { "1": { "version": 1, - "hash": "655a8b9ca076bc7cb48833954ccc86634d78d24f76c77d3d9de1c677a00415d7", + "hash": "f662eed2dfa3bb87f2502e74be5c19decdceab08f809e2d0709cfc6bb838b556", "action": "add" } }, "CreateAsset": { "1": { "version": 1, - "hash": "c94660b20a96ee4347210d2ee9ea5c60a221e55e2921ea5ea28a1ee606020ffb", + "hash": "1b4c71569b8da64258672483bd36dc4aa99a32d4cb519659241d15bc898041a6", "action": "add" } }, "Dataset": { "1": { "version": 1, - "hash": "ab8fec7aa0a422a7b55209bbe87991d7085ef70df9dcf1c3538ab9a355f1554d", + "hash": "99ca2fa3e46fd9810222d269fac6accb546f632e94d5d57529016ba5e55af5a8", "action": "add" } }, @@ -352,7 +352,7 @@ "CreateDataset": { "1": { "version": 1, - "hash": "e47f467815eaf6a40762407da0debaeac5093e36b76e54d630486fe43c1d93d6", + "hash": "3b020d9b8928cbd7e91f41c749ab4c932e19520696a183f2c7cd1312ebb640d1", "action": "add" } }, From ffc0aee597677ce10ebe8b853186ed7cc0ea1d9e Mon Sep 17 00:00:00 2001 From: Shubham Gupta Date: Fri, 6 Oct 2023 14:12:13 +0530 Subject: [PATCH 50/67] update protocol version --- packages/syft/src/syft/protocol/protocol_version.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/syft/src/syft/protocol/protocol_version.json b/packages/syft/src/syft/protocol/protocol_version.json index 863fec82e95..1662686657b 100644 --- a/packages/syft/src/syft/protocol/protocol_version.json +++ b/packages/syft/src/syft/protocol/protocol_version.json @@ -324,7 +324,7 @@ "Asset": { "1": { "version": 1, - "hash": 
"f662eed2dfa3bb87f2502e74be5c19decdceab08f809e2d0709cfc6bb838b556", + "hash": "24350b8d9597df49999918ad42e0eece1328ea30389311f1e0a420be8f39b8a1", "action": "add" } }, From 3ba9fab52838f12d2e0a028c58cf65512f6563a3 Mon Sep 17 00:00:00 2001 From: Shubham Gupta Date: Fri, 6 Oct 2023 16:38:09 +0530 Subject: [PATCH 51/67] stage protocol changes in dev mode during app reload - reset protocol_version.py - stage protocol when running in python mode - stage protocol change on running unit tests --- packages/grid/backend/grid/core/config.py | 1 + packages/grid/backend/grid/main.py | 9 + packages/hagrid/hagrid/orchestra.py | 5 + .../src/syft/protocol/protocol_version.json | 782 +----------------- tox.ini | 2 + 5 files changed, 18 insertions(+), 781 deletions(-) diff --git a/packages/grid/backend/grid/core/config.py b/packages/grid/backend/grid/core/config.py index 47373b66bb9..0b9948ca961 100644 --- a/packages/grid/backend/grid/core/config.py +++ b/packages/grid/backend/grid/core/config.py @@ -111,6 +111,7 @@ def get_emails_enabled(cls, v: bool, values: Dict[str, Any]) -> bool: True if os.getenv("TEST_MODE", "false").lower() == "true" else False ) ASSOCIATION_TIMEOUT: int = 10 + DEV_MODE: bool = True if os.getenv("DEV_MODE", "false").lower() == "true" else False class Config: case_sensitive = True diff --git a/packages/grid/backend/grid/main.py b/packages/grid/backend/grid/main.py index 4fc0b35a946..bc70d91a555 100644 --- a/packages/grid/backend/grid/main.py +++ b/packages/grid/backend/grid/main.py @@ -31,6 +31,15 @@ app.include_router(api_router, prefix=settings.API_V2_STR) +if settings.DEV_MODE: + # syft absolute + from syft.protocol.data_protocol import stage_protocol_changes + + print("Staging protocol changes...") + status = stage_protocol_changes() + print(status) + + # needed for Google Kubernetes Engine LoadBalancer Healthcheck @app.get( "/", diff --git a/packages/hagrid/hagrid/orchestra.py b/packages/hagrid/hagrid/orchestra.py index 9a9de5fd43b..da87600e13b 100644 --- 
a/packages/hagrid/hagrid/orchestra.py +++ b/packages/hagrid/hagrid/orchestra.py @@ -26,6 +26,7 @@ # syft absolute from syft.abstract_node import NodeSideType from syft.abstract_node import NodeType + from syft.protocol.data_protocol import stage_protocol_changes from syft.service.response import SyftError except Exception: # nosec # print("Please install syft with `pip install syft`") @@ -248,6 +249,10 @@ def deploy_to_python( if hasattr(NodeType, "GATEWAY"): worker_classes[NodeType.GATEWAY] = sy.Gateway + if dev_mode: + print("Staging Protocol Changes...") + stage_protocol_changes() + if port: if port == "auto": # dont use default port to prevent port clashes in CI diff --git a/packages/syft/src/syft/protocol/protocol_version.json b/packages/syft/src/syft/protocol/protocol_version.json index 1662686657b..0967ef424bc 100644 --- a/packages/syft/src/syft/protocol/protocol_version.json +++ b/packages/syft/src/syft/protocol/protocol_version.json @@ -1,781 +1 @@ -{ - "dev": { - "object_versions": { - "PartialSyftObject": { - "1": { - "version": 1, - "hash": "008917584d8e1c09015cdbef02f59c0622f48e0618877c1b44425c8846befc13", - "action": "add" - } - }, - "NodeMetadataUpdate": { - "1": { - "version": 1, - "hash": "569d124c23590360bda240c19b53314ccc6204c5d1ab0d2898976a028e002191", - "action": "add" - } - }, - "NodeMetadata": { - "1": { - "version": 1, - "hash": "6bee018894dfdf697ea624740d0bf051750e0b0d8470ced59646f6d8812068ac", - "action": "add" - }, - "2": { - "version": 2, - "hash": "f856169fea72486cd436875ce4411ef935da11eb7c5af48121adfa00d4c0cdb6", - "action": "add" - } - }, - "LinkedObject": { - "1": { - "version": 1, - "hash": "824567c6933c095d0e2f6995c8de3581c0fbd2e9e4ead35c8159f7964709c28e", - "action": "add" - } - }, - "BaseConfig": { - "1": { - "version": 1, - "hash": "4e5257080ce615aa4122b02bad8487e4c7d6d0f171ff77abbc9e8cd3e33df89a", - "action": "add" - } - }, - "ServiceConfig": { - "1": { - "version": 1, - "hash": 
"ca91f59bf045d949d82860f7d52655bfbede4cf6bdc5bae8f847f08a16f05d74", - "action": "add" - } - }, - "LibConfig": { - "1": { - "version": 1, - "hash": "c6ff229aea16874c5d9ae4d1f9e500d13f5cf984bbcee7abd16c5841707a2f78", - "action": "add" - } - }, - "APIEndpoint": { - "1": { - "version": 1, - "hash": "c0e83867b107113e6fed06364ba364c24b2f4af35b15a3869b176318d3be7989", - "action": "add" - } - }, - "LibEndpoint": { - "1": { - "version": 1, - "hash": "153eac6d8990774eebfffaa75a9895e7c4e1a0e09465d5da0baf4c3a3b03369d", - "action": "add" - } - }, - "SignedSyftAPICall": { - "1": { - "version": 1, - "hash": "e66a116de2fa44ebdd0d4c2d7d5a047dedb555fd201a0f431cd8017d9d33a61d", - "action": "add" - } - }, - "SyftAPICall": { - "1": { - "version": 1, - "hash": "014bd1d0933f6070888a313edba239170759de24eae49bf2374c1be4dbe2b4d7", - "action": "add" - } - }, - "SyftAPIData": { - "1": { - "version": 1, - "hash": "db101a75227e34750d7056785a1e87bb2e8ad6604f19c372d0cb6aa437243bf5", - "action": "add" - } - }, - "SyftAPI": { - "1": { - "version": 1, - "hash": "2bba1d9fcf677a58e35bf903de3da22ee4913af138aa3012af9c46b3609579cd", - "action": "add" - } - }, - "User": { - "1": { - "version": 1, - "hash": "078636e64f737e60245b39cf348d30fb006531e80c12b70aa7cf98254e1bb37a", - "action": "add" - } - }, - "UserUpdate": { - "1": { - "version": 1, - "hash": "839dd90aeb611e1dc471c8fd6daf230e913465c0625c6a297079cb7f0a271195", - "action": "add" - } - }, - "UserCreate": { - "1": { - "version": 1, - "hash": "dab78b63544ae91c09f9843c323cb237c0a6fcfeb71c1acf5f738e2fcf5c277f", - "action": "add" - } - }, - "UserSearch": { - "1": { - "version": 1, - "hash": "69d1e10b81c8a4143cf70e4f911d8562732af2458ebbc455ca64542f11373dd1", - "action": "add" - } - }, - "UserView": { - "1": { - "version": 1, - "hash": "63289383fe7e7584652f242a4362ce6e2f0ade52f6416ab6149b326a506b0675", - "action": "add" - } - }, - "UserViewPage": { - "1": { - "version": 1, - "hash": "16dac6209b19a934d286ef1efa874379e0040c324e71023c57d1bc6d2d367171", - 
"action": "add" - } - }, - "UserPrivateKey": { - "1": { - "version": 1, - "hash": "7cb196587887f0f3bffb298dd9f3b88509e9b2748792bf8dc03bdd0d6b98714a", - "action": "add" - } - }, - "StoreConfig": { - "1": { - "version": 1, - "hash": "17de8875cf590311ddb042140347ffc79d4a85028e504dad178ca4e1237ec861", - "action": "add" - } - }, - "NodeSettingsUpdate": { - "1": { - "version": 1, - "hash": "b6ddc66ff270a3c2c4760e31e1a55d72ed04ccae2d0115ebe2fba6f2bf9bd119", - "action": "add" - } - }, - "NodeSettings": { - "1": { - "version": 1, - "hash": "b662047bb278f4f5db77c102f94b733c3a929839271b3d6b82ea174a60e2aaf0", - "action": "add" - } - }, - "HTTPConnection": { - "1": { - "version": 1, - "hash": "5ee19eaf55ecbe7945ea45924c036ec0f500114a2f64176620961a8c2ec94cdb", - "action": "add" - } - }, - "PythonConnection": { - "1": { - "version": 1, - "hash": "011946fc9af0a6987f5c7bc9b0208b2fae9d65217531430bced7ba542788da1a", - "action": "add" - } - }, - "DateTime": { - "1": { - "version": 1, - "hash": "7e9d89309a10d2110a7ae4f97d8f25a7914853269e8fa0c531630790c1253f17", - "action": "add" - } - }, - "BlobFile": { - "1": { - "version": 1, - "hash": "47ed55183d619c6c624e35412360a41de42833e2c24223c1de1ad12a84fdafc2", - "action": "add" - } - }, - "SecureFilePathLocation": { - "1": { - "version": 1, - "hash": "7febc066e2ee5a3a4a891720afede3f5c155cacc0557662ac4d04bf67b964c6d", - "action": "add" - } - }, - "SeaweedSecureFilePathLocation": { - "1": { - "version": 1, - "hash": "5724a38b1a92b8a55da3d9cc34a720365a6d0c32683acda630fc44067173e201", - "action": "add" - } - }, - "BlobStorageEntry": { - "1": { - "version": 1, - "hash": "9f1b027cce390ee6f71c7a81e7420bb71a477b29c6c62ba74e781a97bc5434e6", - "action": "add" - } - }, - "BlobStorageMetadata": { - "1": { - "version": 1, - "hash": "6888943be3f97186190dd26d7eefbdf29b15c6f2fa459e13608065ebcdb799e2", - "action": "add" - } - }, - "CreateBlobStorageEntry": { - "1": { - "version": 1, - "hash": 
"61a373336e83645f1b6d78a320323d9ea4ee91b3d87b730cb0608fbfa0072262", - "action": "add" - } - }, - "BlobRetrieval": { - "1": { - "version": 1, - "hash": "a8d7e1d6483e7a9b5a130e837fa398862aa6cbb316cc5f4470450d835755fdd9", - "action": "add" - } - }, - "SyftObjectRetrieval": { - "1": { - "version": 1, - "hash": "7ccc62d5b434d2d438b3df661b4d753b0c7c8d593d451d8b86d364da83998c89", - "action": "add" - } - }, - "BlobRetrievalByURL": { - "1": { - "version": 1, - "hash": "18fd860cb9de296532fc9ff075932e6a4377cc8f043dd88ed4f620517321077d", - "action": "add" - } - }, - "BlobDeposit": { - "1": { - "version": 1, - "hash": "c98e6da658a3be01ead4ea6ee6a4c10046879f0ce0f5fc5f946346671579b229", - "action": "add" - } - }, - "WorkerSettings": { - "1": { - "version": 1, - "hash": "0dcd95422ec8a7c74e45ee68a125084c08f898dc94a13d25fe5a5fd0e4fc5027", - "action": "add" - } - }, - "HTTPNodeRoute": { - "1": { - "version": 1, - "hash": "199423cebb9427d22a5c0e4e2210230be6c64d2996aa35a1d6f7677d0ebf945d", - "action": "add" - } - }, - "PythonNodeRoute": { - "1": { - "version": 1, - "hash": "4ad5eccc9e849c81bd7a6f18199cd434a7eab107c07a6ff3a870d561ae99e69e", - "action": "add" - } - }, - "EnclaveMetadata": { - "1": { - "version": 1, - "hash": "39f85e475015e6f860ddcc5fea819423eba2db8f4b7d8e004c05a44d6f8444c6", - "action": "add" - } - }, - "DataSubject": { - "1": { - "version": 1, - "hash": "0b8b049d4627727b444c419f5d6a97b7cb97a433088ebf744c854b6a470dadf1", - "action": "add" - } - }, - "DataSubjectCreate": { - "1": { - "version": 1, - "hash": "5a94f9fcba75c50d78d71222f0235c5fd4d8003ae0db4d74bdbc4d56a99de3aa", - "action": "add" - } - }, - "DataSubjectMemberRelationship": { - "1": { - "version": 1, - "hash": "0a820edc9f1a87387acc3c611fe852752fcb3dab7608058f2bc48211be7bfbd2", - "action": "add" - } - }, - "Contributor": { - "1": { - "version": 1, - "hash": "d1d4f25bb87e59c0414501d3335097de66815c164c9ed5a7850ff8bec69fbcdc", - "action": "add" - } - }, - "MarkdownDescription": { - "1": { - "version": 1, - "hash": 
"519328a3952049f57004013e4fb00840695b24b8575cad983056412c9c9d9ba6", - "action": "add" - } - }, - "Asset": { - "1": { - "version": 1, - "hash": "24350b8d9597df49999918ad42e0eece1328ea30389311f1e0a420be8f39b8a1", - "action": "add" - } - }, - "CreateAsset": { - "1": { - "version": 1, - "hash": "1b4c71569b8da64258672483bd36dc4aa99a32d4cb519659241d15bc898041a6", - "action": "add" - } - }, - "Dataset": { - "1": { - "version": 1, - "hash": "99ca2fa3e46fd9810222d269fac6accb546f632e94d5d57529016ba5e55af5a8", - "action": "add" - } - }, - "DatasetPageView": { - "1": { - "version": 1, - "hash": "7e7403e5de953b0730569861eb2e0df7a63b4360726c4b6b6939be09ad16df2a", - "action": "add" - } - }, - "CreateDataset": { - "1": { - "version": 1, - "hash": "3b020d9b8928cbd7e91f41c749ab4c932e19520696a183f2c7cd1312ebb640d1", - "action": "add" - } - }, - "ActionDataEmpty": { - "1": { - "version": 1, - "hash": "89b5912fe5416f922051b8068be6071a03c87a4ab264959de524f1b86e95f028", - "action": "add" - } - }, - "ActionFileData": { - "1": { - "version": 1, - "hash": "1f32d94b75b0a6b4e86cec93d94aa905738219e3e7e75f51dd335ee832a6ed3e", - "action": "add" - } - }, - "Action": { - "1": { - "version": 1, - "hash": "5cf71ee35097f17fbb1dd05096f875211d71cf07161205d7f6a9c11fd49d5272", - "action": "add" - } - }, - "ActionObject": { - "1": { - "version": 1, - "hash": "632446f1415102490c93fafb56dd9eb29d79623bcc5e9f2e6e37c4f63c2c51c3", - "action": "add" - } - }, - "AnyActionObject": { - "1": { - "version": 1, - "hash": "bcb31f847907edc9c95d2d120dc5427854604f40940e3f41cd0474a1820ac65e", - "action": "add" - } - }, - "TwinObject": { - "1": { - "version": 1, - "hash": "c42455586b43724a7421becd99122b787a129798daf6081e96954ecaea228099", - "action": "add" - } - }, - "ExactMatch": { - "1": { - "version": 1, - "hash": "e497e2e2380db72766c5e219e8afd13136d8953933d6f1eaf83b14001e887cde", - "action": "add" - } - }, - "OutputHistory": { - "1": { - "version": 1, - "hash": 
"4ec6e6efd86a972b474251885151bdfe4ef262562174605e8ab6a8abba1aa867", - "action": "add" - } - }, - "OutputPolicyExecuteCount": { - "1": { - "version": 1, - "hash": "6bb24b3b35e19564c43b838ca3f46ccdeadb6596511917f2d220681a378e439d", - "action": "add" - } - }, - "OutputPolicyExecuteOnce": { - "1": { - "version": 1, - "hash": "32a40fc9966b277528eebc61c01041f3a5447417731954abdaffbb14dabc76bb", - "action": "add" - } - }, - "UserPolicy": { - "1": { - "version": 1, - "hash": "c69b17b1d96cace8b45da6d9639165f2da4aa7ff156b6fd922ac217bf7856d8a", - "action": "add" - } - }, - "SubmitUserPolicy": { - "1": { - "version": 1, - "hash": "96f7f39279fadc70c569b8d48ed4d6420a8132db51e37466d272fda19953554b", - "action": "add" - } - }, - "UserCode": { - "1": { - "version": 1, - "hash": "e14c22686cdc7d1fb2b0d01c0aebdea37e62a61b051677c1d30234214f05cd42", - "action": "add" - } - }, - "SubmitUserCode": { - "1": { - "version": 1, - "hash": "f572d32350d09e25b29572c591029d37a216818618c383094404f84bc9c15dd6", - "action": "add" - } - }, - "UserCodeExecutionResult": { - "1": { - "version": 1, - "hash": "49c32e85e78b7b189a7f13b7e26115ef94fcb0b60b578adcbe2b95e289f63a6e", - "action": "add" - } - }, - "CodeHistory": { - "1": { - "version": 1, - "hash": "a7baae93862ae0aa67675f1617574e31aafb15a9ebff633eb817278a3a867161", - "action": "add" - } - }, - "CodeHistoryView": { - "1": { - "version": 1, - "hash": "0ed1a2a04a962ecbcfa38b0b8a03c1e51e8946a4b80f6bf2557148ce658671ce", - "action": "add" - } - }, - "CodeHistoriesDict": { - "1": { - "version": 1, - "hash": "95288411cd5843834f3273a2fd66a7df2e603e980f4ab1d329f9ab17d5d2f643", - "action": "add" - } - }, - "UsersCodeHistoriesDict": { - "1": { - "version": 1, - "hash": "5e1f389c4565ee8558386dd5c934d81e0c68ab1434f86bb9065976b587ef44d1", - "action": "add" - } - }, - "NodePeer": { - "1": { - "version": 1, - "hash": "50c5b7867d450c1af8011830339c07f5e7bd92589daeac976e0ab86151082cbc", - "action": "add" - } - }, - "CommandReport": { - "1": { - "version": 1, - "hash": 
"81c6f248e89f6191f75afb7170f82b616377ae46c1d809f6047e13c6f2f299d5", - "action": "add" - } - }, - "CommandResult": { - "1": { - "version": 1, - "hash": "65588691901dc0562afb650b0abe04fe6e3e3db516abda3ec82a371ce459ed0d", - "action": "add" - } - }, - "VPNClientConnection": { - "1": { - "version": 1, - "hash": "cf12dcf5066e6c441e6841ae24bd295d8331e7920c79473bfabc17c5e47cf79e", - "action": "add" - } - }, - "HeadscaleAuthToken": { - "1": { - "version": 1, - "hash": "d88dbe672feb126eb1c5f36208edb1effcfe8721fb6619a9ac62ed3fff8f1546", - "action": "add" - } - }, - "TailscalePeer": { - "1": { - "version": 1, - "hash": "603da3a1087e8d80a6b7e4cd6ccaccd7e1baf3ec77265d810e3a9e9cd233ac04", - "action": "add" - } - }, - "TailscaleStatus": { - "1": { - "version": 1, - "hash": "52a7bd4b72f160a8b14466e98bb2b2a70c4f49aaff635e844fa40e955f7d76d2", - "action": "add" - } - }, - "OnDiskBlobDeposit": { - "1": { - "version": 1, - "hash": "5efc230c1ee65c4626d334aa69ed458c796c45265e546a333844c6c2bcd0e6b0", - "action": "add" - } - }, - "SeaweedFSBlobDeposit": { - "1": { - "version": 1, - "hash": "382a9ac178deed2a9591e1ebbb39f265cbe67027fb93a420d473a4c26b7fda11", - "action": "add" - } - }, - "DictStoreConfig": { - "1": { - "version": 1, - "hash": "256e9c623ce0becd555ddd2a55a0c15514e162786b1549388cef98a92a9b18c9", - "action": "add" - } - }, - "NumpyArrayObject": { - "1": { - "version": 1, - "hash": "dcc7b44fa5ad22ae0bc576948f856c172dac1e9de2bc8e2a302e428f3309a278", - "action": "add" - } - }, - "NumpyScalarObject": { - "1": { - "version": 1, - "hash": "5c1b6b6e8ba88bc79e76646d621489b889fe8f9b9fd59f117d594be18a409633", - "action": "add" - } - }, - "NumpyBoolObject": { - "1": { - "version": 1, - "hash": "a5c822a6a3ca9eefd6a2b68f7fd0bc614fba7995f6bcc30bdc9dc882296b9b16", - "action": "add" - } - }, - "PandasDataframeObject": { - "1": { - "version": 1, - "hash": "35058924b3de2e0a604a92f91f4dd2e3cc0dac80c219d34f360e7cedd52f5f4c", - "action": "add" - } - }, - "PandasSeriesObject": { - "1": { - "version": 
1, - "hash": "2a0d8a55f1c27bd8fccd276cbe01bf272c40cab10417d7027273983fed423caa", - "action": "add" - } - }, - "ReplyNotification": { - "1": { - "version": 1, - "hash": "34b2ad522f7406c2486573467d9c7acef5c1063a0d9f2177c3bda2d8c4f87572", - "action": "add" - } - }, - "Notification": { - "1": { - "version": 1, - "hash": "d13981f721fe2b3e2717640ee07dc716c596e4ecd442461665c3fdab0b85bf0e", - "action": "add" - } - }, - "CreateNotification": { - "1": { - "version": 1, - "hash": "b1f459de374fe674f873a4a5f3fb8a8aabe0d83faad84a933f0a77dd1141159a", - "action": "add" - } - }, - "Change": { - "1": { - "version": 1, - "hash": "aefebd1601cf5bfd4817b0db75300a78299cc4949ead735a90873cbd22c8d4bc", - "action": "add" - } - }, - "ChangeStatus": { - "1": { - "version": 1, - "hash": "627f6f8e42cc285336aa6fd4916285d796140f4ff901487b7cb3907ef0f116a6", - "action": "add" - } - }, - "ActionStoreChange": { - "1": { - "version": 1, - "hash": "17b865e75eb3fb2693924fb00ba87a25260be45d55a4eb2184c4ead22d787cbe", - "action": "add" - } - }, - "Request": { - "1": { - "version": 1, - "hash": "e054307eeb7f13683cde9ce7613d5ca2925a13fff7c345b1c9f729a12c955f90", - "action": "add" - } - }, - "RequestInfo": { - "1": { - "version": 1, - "hash": "b76075c138afc0563ce9ac7f6b1131f048951f7486cd516c02736dc1a2a23639", - "action": "add" - } - }, - "RequestInfoFilter": { - "1": { - "version": 1, - "hash": "7103abdc464ae71bb746410f5730f55dd8ed82268aa32bbb0a69e0070488a669", - "action": "add" - } - }, - "SubmitRequest": { - "1": { - "version": 1, - "hash": "96b4ec12beafd9d8a7c97399cb8a23dade4db16d8f521be3fe7b8fec99db5161", - "action": "add" - } - }, - "ObjectMutation": { - "1": { - "version": 1, - "hash": "0ee3dd38d6df0fe9a19d848e8f3aaaf13a6ba86afe3406c239caed6da185651a", - "action": "add" - } - }, - "EnumMutation": { - "1": { - "version": 1, - "hash": "4c02f956ec9b973064972cc57fc8dd9c525e683f93f804642b4e1bfee1b62e57", - "action": "add" - } - }, - "UserCodeStatusChange": { - "1": { - "version": 1, - "hash": 
"4f5b405cc2b3976ed8f7018df82e873435d9187dff15fa5a23bc85a738969f3f", - "action": "add" - } - }, - "SyftObjectMigrationState": { - "1": { - "version": 1, - "hash": "d3c8126bc15dae4dd243bb035530e3f56cd9e433d403dd6b5f3b45face6d281f", - "action": "add" - } - }, - "ProjectThreadMessage": { - "1": { - "version": 1, - "hash": "1118e935792e8e54103dbf91fa33edbf192a7767d2b1d4526dfa7d4a643cde2e", - "action": "add" - } - }, - "ProjectMessage": { - "1": { - "version": 1, - "hash": "55a3a5171b6949372b4125cc461bf39bc998565e07703804fca6c7ef99695ae4", - "action": "add" - } - }, - "ProjectRequestResponse": { - "1": { - "version": 1, - "hash": "d4c360e845697a0b24695143d0781626cd344cfde43162c90ae90fe67e00ae21", - "action": "add" - } - }, - "ProjectRequest": { - "1": { - "version": 1, - "hash": "514d189df335c68869eea36befcdcafec74bdc682eaf18871fe879e26da4dbb6", - "action": "add" - } - }, - "AnswerProjectPoll": { - "1": { - "version": 1, - "hash": "ff2e1ac7bb764c99d646b96eb3ebfbf9311599b7e3be07aa4a4eb4810bb6dd12", - "action": "add" - } - }, - "ProjectPoll": { - "1": { - "version": 1, - "hash": "b0ac8f1d9c06997374ddbc33fdf1d0af0da15fdb6899f52d91a8574106558964", - "action": "add" - } - }, - "Project": { - "1": { - "version": 1, - "hash": "ec5b7ac1c92808e266f06b175c6ebcd50be81777ad120c02ce8c6074d0004788", - "action": "add" - } - }, - "ProjectSubmit": { - "1": { - "version": 1, - "hash": "0374b37779497d7e0b2ffeabc38d35bfbae2ee762a7674a5a8af75e7c5545e61", - "action": "add" - } - }, - "QueueItem": { - "1": { - "version": 1, - "hash": "5aa94681d9d0715d5b605f9625a54e114927271378cf2ea7245f85c488035e0b", - "action": "add" - } - }, - "ZMQClientConfig": { - "1": { - "version": 1, - "hash": "e6054969b495791569caaf33239039beae3d116e1fe74e9575467c48b9007c45", - "action": "add" - } - }, - "SQLiteStoreConfig": { - "1": { - "version": 1, - "hash": "b656b26c14cf4e97aba702dd62a0927aec7f860c12eed512c2c688e1b7109aa5", - "action": "add" - } - }, - "Plan": { - "1": { - "version": 1, - "hash": 
"a0bba2b7792c9e08c453e9e256f0ac6e6185610726566bcd50b057ae83b42d9a", - "action": "add" - } - } - } - } -} +{} diff --git a/tox.ini b/tox.ini index 357485e1b21..131985a17d2 100644 --- a/tox.ini +++ b/tox.ini @@ -375,6 +375,8 @@ setenv = ENABLE_SIGNUP=False commands = pip list + echo "Staging Protocol changes to dev" + python -c 'import syft as sy; sy.stage_protocol_changes()' pytest -n auto [testenv:stack.test.integration.enclave.oblv] From 365e65c021fdef1f6a3c028e60b8369b6f16a20c Mon Sep 17 00:00:00 2001 From: Shubham Gupta Date: Fri, 6 Oct 2023 17:14:20 +0530 Subject: [PATCH 52/67] stage protocol changes during installation of syft in tox --- tox.ini | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/tox.ini b/tox.ini index 131985a17d2..55857e4c96a 100644 --- a/tox.ini +++ b/tox.ini @@ -37,8 +37,12 @@ deps = -e{toxinidir}/packages/syft[dev] changedir = {toxinidir}/packages/syft description = Syft +allowlist_externals = + echo commands = pip list + echo "Staging protocol changes" + python -c 'import syft as sy; sy.stage_protocol_changes()' [testenv:hagrid] deps = @@ -375,8 +379,6 @@ setenv = ENABLE_SIGNUP=False commands = pip list - echo "Staging Protocol changes to dev" - python -c 'import syft as sy; sy.stage_protocol_changes()' pytest -n auto [testenv:stack.test.integration.enclave.oblv] From a3ddcdc9505d85b69a31423f168ba622d94e7bd3 Mon Sep 17 00:00:00 2001 From: Shubham Gupta Date: Fri, 6 Oct 2023 17:35:27 +0530 Subject: [PATCH 53/67] stage protocol change in all tests in tox.ini --- tox.ini | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/tox.ini b/tox.ini index 55857e4c96a..3f24bebd62f 100644 --- a/tox.ini +++ b/tox.ini @@ -37,12 +37,8 @@ deps = -e{toxinidir}/packages/syft[dev] changedir = {toxinidir}/packages/syft description = Syft -allowlist_externals = - echo commands = pip list - echo "Staging protocol changes" - python -c 'import syft as sy; sy.stage_protocol_changes()' [testenv:hagrid] deps = @@ -238,6 
+234,7 @@ setenv = PYTHONIOENCODING = utf-8 PYTEST_MODULES = {env:PYTEST_MODULES:frontend network e2e security redis} commands = + python -c 'import syft as sy; sy.stage_protocol_changes()' bash -c "echo Running with HAGRID_FLAGS=$HAGRID_FLAGS EMULATION=$EMULATION PYTEST_MODULES=$PYTEST_MODULES; date" ; install syft and hagrid @@ -380,6 +377,7 @@ setenv = commands = pip list pytest -n auto + python -c 'import syft as sy; sy.stage_protocol_changes()' [testenv:stack.test.integration.enclave.oblv] description = Integration Tests for Oblv Enclave @@ -397,6 +395,7 @@ setenv = DOMAIN_CONNECTION_PORT=8010 ENABLE_SIGNUP=True commands = + python -c 'import syft as sy; sy.stage_protocol_changes()' pip install oblv-ctl==0.3.1 # run at start to kill any process started beforehand bash -c 'chmod +x scripts/kill_process_in_port.sh && ./scripts/kill_process_in_port.sh $LOCAL_ENCLAVE_PORT' @@ -426,6 +425,7 @@ setenv = TEST_NOTEBOOK_PATHS = {env:TEST_NOTEBOOK_PATHS:api/0.8,tutorials} ENABLE_SIGNUP=True commands = + python -c 'import syft as sy; sy.stage_protocol_changes()' bash -c "echo Running with ORCHESTRA_DEPLOYMENT_TYPE=$ORCHESTRA_DEPLOYMENT_TYPE DEV_MODE=$DEV_MODE TEST_NOTEBOOK_PATHS=$TEST_NOTEBOOK_PATHS; date" bash -c "for subfolder in $(echo ${TEST_NOTEBOOK_PATHS} | tr ',' ' '); do \ if [[ $subfolder == *tutorials* ]]; then \ @@ -457,6 +457,7 @@ setenv = TEST_NOTEBOOK_PATHS = {env:TEST_NOTEBOOK_PATHS:api/0.8,tutorials} ENABLE_SIGNUP=True commands = + python -c 'import syft as sy; sy.stage_protocol_changes()' # Volume cleanup bash -c "docker volume rm test-domain-1_mongo-data --force || true" @@ -489,6 +490,7 @@ setenv = ORCHESTRA_DEPLOYMENT_TYPE = {env:ORCHESTRA_DEPLOYMENT_TYPE:vm} VAGRANT_DESTROY = {env:VAGRANT_DESTROY:skip} commands = + python -c 'import syft as sy; sy.stage_protocol_changes()' bash -c 'if [[ "$(uname -m)" == *"arm"* ]]; then \ export VAGRANT_BOX="ubuntu-22-04-arm64"; \ elif [[ "$(uname -m)" == *"x86"* ]]; then \ @@ -539,6 +541,7 @@ setenv = 
ORCHESTRA_DEPLOYMENT_TYPE = {env:ORCHESTRA_DEPLOYMENT_TYPE:podman} NODE_PORT = {env:NODE_PORT:8080} commands = + python -c 'import syft as sy; sy.stage_protocol_changes()' bash -c "podman pod rm --force --all || true"; bash -c "podman system prune --volumes --force || true"; bash -c "podman volume rm $(podman volume ls -q)||true"; @@ -609,6 +612,7 @@ setenv = NODE_PORT = {env:NODE_PORT:9082} commands = k3d version + python -c 'import syft as sy; sy.stage_protocol_changes()' ; bash -c "docker rm $(docker ps -aq) --force || true" # bash -c "k3d cluster delete test-gateway-1 || true" @@ -872,6 +876,7 @@ commands = bash -c '(kubectl logs service/backend --context k3d-syft --namespace syft -f &) | grep -q "Application startup complete" || true' + python -c 'import syft as sy; sy.stage_protocol_changes()' ; frontend bash -c 'if [[ "$PYTEST_MODULES" == *"frontend"* ]]; then \ echo "Starting frontend"; date; \ From 07a3bfa1fd48a51698898371894189f5cefee3bd Mon Sep 17 00:00:00 2001 From: Shubham Gupta Date: Fri, 6 Oct 2023 18:28:41 +0530 Subject: [PATCH 54/67] =?UTF-8?q?dumb=20mistake=20of=20staging=20protocol?= =?UTF-8?q?=20post=20test=20=F0=9F=A4=A6?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- tox.ini | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tox.ini b/tox.ini index 3f24bebd62f..6817beff53b 100644 --- a/tox.ini +++ b/tox.ini @@ -375,9 +375,9 @@ changedir = {toxinidir}/packages/syft setenv = ENABLE_SIGNUP=False commands = + python -c 'import syft as sy; sy.stage_protocol_changes()' pip list pytest -n auto - python -c 'import syft as sy; sy.stage_protocol_changes()' [testenv:stack.test.integration.enclave.oblv] description = Integration Tests for Oblv Enclave From 665364e80402d30cecfd652cde837b1566c8ce56 Mon Sep 17 00:00:00 2001 From: Shubham Gupta Date: Mon, 9 Oct 2023 14:04:12 +0530 Subject: [PATCH 55/67] mock and patch protocol file and filename for tests - create new protocol file for each test 
and unlink after run - stage and reset protocol file after each test remove stage protocol command --- packages/syft/tests/conftest.py | 41 ++++++++++++++++++- .../migrations/protocol_communication_test.py | 12 +----- tox.ini | 10 +---- 3 files changed, 42 insertions(+), 21 deletions(-) diff --git a/packages/syft/tests/conftest.py b/packages/syft/tests/conftest.py index 734faf9d5a5..f403159b8a3 100644 --- a/packages/syft/tests/conftest.py +++ b/packages/syft/tests/conftest.py @@ -1,9 +1,16 @@ +# stdlib +import json +from pathlib import Path +from unittest import mock + # third party from faker import Faker import pytest # syft absolute import syft as sy +from syft.protocol.data_protocol import get_data_protocol +from syft.protocol.data_protocol import stage_protocol_changes # relative from .syft.stores.store_fixtures_test import dict_action_store # noqa: F401 @@ -26,8 +33,40 @@ def faker(): return Faker() +def create_file(filepath: Path, data: dict): + with open(filepath, "w") as fp: + fp.write(json.dumps(data)) + + +def remove_file(filepath: Path): + filepath.unlink(missing_ok=True) + + +@pytest.fixture(autouse=True) +def protocol_file(): + random_name = sy.UID().to_string() + protocol_dir = sy.SYFT_PATH / "protocol" + file_path = protocol_dir / f"{random_name}.json" + dp = get_data_protocol() + create_file(filepath=file_path, data=dp.protocol_history) + yield file_path + remove_file(filepath=file_path) + + +@pytest.fixture(autouse=True) +def stage_protocol(protocol_file: Path): + with mock.patch( + "syft.protocol.data_protocol.PROTOCOL_STATE_FILENAME", + protocol_file.name, + ): + dp = get_data_protocol() + stage_protocol_changes() + yield + dp.save_history(dp.protocol_history) + + @pytest.fixture(autouse=True) -def worker(faker): +def worker(faker, stage_protocol): return sy.Worker.named(name=faker.name()) diff --git a/packages/syft/tests/syft/migrations/protocol_communication_test.py b/packages/syft/tests/syft/migrations/protocol_communication_test.py index 
ce6966a4ac1..88e48ad5098 100644 --- a/packages/syft/tests/syft/migrations/protocol_communication_test.py +++ b/packages/syft/tests/syft/migrations/protocol_communication_test.py @@ -4,9 +4,6 @@ from typing import Union from unittest import mock -# third party -import pytest - # syft absolute import syft as sy from syft.node.worker import Worker @@ -154,14 +151,7 @@ def setup_version_second(node_name: str, klass_version_one: type): return node, syft_klass_version_second -@pytest.fixture -def dp_fixture(): - dp = get_data_protocol() - yield - dp.save_history(dp.protocol_history) - - -def test_client_server_running_different_protocols(dp_fixture): +def test_client_server_running_different_protocols(): node_name = UID().to_string() # Setup mock object version one diff --git a/tox.ini b/tox.ini index 6817beff53b..c0c583dc43f 100644 --- a/tox.ini +++ b/tox.ini @@ -234,7 +234,6 @@ setenv = PYTHONIOENCODING = utf-8 PYTEST_MODULES = {env:PYTEST_MODULES:frontend network e2e security redis} commands = - python -c 'import syft as sy; sy.stage_protocol_changes()' bash -c "echo Running with HAGRID_FLAGS=$HAGRID_FLAGS EMULATION=$EMULATION PYTEST_MODULES=$PYTEST_MODULES; date" ; install syft and hagrid @@ -375,7 +374,6 @@ changedir = {toxinidir}/packages/syft setenv = ENABLE_SIGNUP=False commands = - python -c 'import syft as sy; sy.stage_protocol_changes()' pip list pytest -n auto @@ -395,7 +393,6 @@ setenv = DOMAIN_CONNECTION_PORT=8010 ENABLE_SIGNUP=True commands = - python -c 'import syft as sy; sy.stage_protocol_changes()' pip install oblv-ctl==0.3.1 # run at start to kill any process started beforehand bash -c 'chmod +x scripts/kill_process_in_port.sh && ./scripts/kill_process_in_port.sh $LOCAL_ENCLAVE_PORT' @@ -425,7 +422,6 @@ setenv = TEST_NOTEBOOK_PATHS = {env:TEST_NOTEBOOK_PATHS:api/0.8,tutorials} ENABLE_SIGNUP=True commands = - python -c 'import syft as sy; sy.stage_protocol_changes()' bash -c "echo Running with ORCHESTRA_DEPLOYMENT_TYPE=$ORCHESTRA_DEPLOYMENT_TYPE 
DEV_MODE=$DEV_MODE TEST_NOTEBOOK_PATHS=$TEST_NOTEBOOK_PATHS; date" bash -c "for subfolder in $(echo ${TEST_NOTEBOOK_PATHS} | tr ',' ' '); do \ if [[ $subfolder == *tutorials* ]]; then \ @@ -457,7 +453,6 @@ setenv = TEST_NOTEBOOK_PATHS = {env:TEST_NOTEBOOK_PATHS:api/0.8,tutorials} ENABLE_SIGNUP=True commands = - python -c 'import syft as sy; sy.stage_protocol_changes()' # Volume cleanup bash -c "docker volume rm test-domain-1_mongo-data --force || true" @@ -490,7 +485,6 @@ setenv = ORCHESTRA_DEPLOYMENT_TYPE = {env:ORCHESTRA_DEPLOYMENT_TYPE:vm} VAGRANT_DESTROY = {env:VAGRANT_DESTROY:skip} commands = - python -c 'import syft as sy; sy.stage_protocol_changes()' bash -c 'if [[ "$(uname -m)" == *"arm"* ]]; then \ export VAGRANT_BOX="ubuntu-22-04-arm64"; \ elif [[ "$(uname -m)" == *"x86"* ]]; then \ @@ -541,7 +535,6 @@ setenv = ORCHESTRA_DEPLOYMENT_TYPE = {env:ORCHESTRA_DEPLOYMENT_TYPE:podman} NODE_PORT = {env:NODE_PORT:8080} commands = - python -c 'import syft as sy; sy.stage_protocol_changes()' bash -c "podman pod rm --force --all || true"; bash -c "podman system prune --volumes --force || true"; bash -c "podman volume rm $(podman volume ls -q)||true"; @@ -612,7 +605,6 @@ setenv = NODE_PORT = {env:NODE_PORT:9082} commands = k3d version - python -c 'import syft as sy; sy.stage_protocol_changes()' ; bash -c "docker rm $(docker ps -aq) --force || true" # bash -c "k3d cluster delete test-gateway-1 || true" @@ -876,7 +868,7 @@ commands = bash -c '(kubectl logs service/backend --context k3d-syft --namespace syft -f &) | grep -q "Application startup complete" || true' - python -c 'import syft as sy; sy.stage_protocol_changes()' + ; frontend bash -c 'if [[ "$PYTEST_MODULES" == *"frontend"* ]]; then \ echo "Starting frontend"; date; \ From 55d217ee1cd1b4c8b49d4aa418ffa60517529aef Mon Sep 17 00:00:00 2001 From: Shubham Gupta Date: Mon, 9 Oct 2023 17:57:23 +0530 Subject: [PATCH 56/67] stage protocol change in dev mode when single container worker is launched --- 
packages/grid/backend/grid/main.py | 6 +++--- packages/grid/worker/worker.py | 7 +++++++ 2 files changed, 10 insertions(+), 3 deletions(-) diff --git a/packages/grid/backend/grid/main.py b/packages/grid/backend/grid/main.py index bc70d91a555..8c4b34507d8 100644 --- a/packages/grid/backend/grid/main.py +++ b/packages/grid/backend/grid/main.py @@ -6,6 +6,9 @@ from fastapi.responses import JSONResponse from starlette.middleware.cors import CORSMiddleware +# syft absolute +from syft.protocol.data_protocol import stage_protocol_changes + # grid absolute from grid.api.router import api_router from grid.core.config import settings @@ -32,9 +35,6 @@ if settings.DEV_MODE: - # syft absolute - from syft.protocol.data_protocol import stage_protocol_changes - print("Staging protocol changes...") status = stage_protocol_changes() print(status) diff --git a/packages/grid/worker/worker.py b/packages/grid/worker/worker.py index 3150f84669f..ea3b9d18315 100644 --- a/packages/grid/worker/worker.py +++ b/packages/grid/worker/worker.py @@ -10,11 +10,13 @@ from syft.node.domain import Domain from syft.node.enclave import Enclave from syft.node.gateway import Gateway +from syft.node.node import get_dev_mode from syft.node.node import get_enable_warnings from syft.node.node import get_node_name from syft.node.node import get_node_side_type from syft.node.node import get_node_type from syft.node.routes import make_routes +from syft.protocol.data_protocol import stage_protocol_changes worker_classes = { NodeType.DOMAIN: Domain, @@ -41,6 +43,11 @@ app = FastAPI(title="Worker") +if get_dev_mode(): + print("Staging protocol changes...") + status = stage_protocol_changes() + print(status) + @app.get("/") async def root() -> str: From 3babdb1ed94445e14b7b914c2b6bedf2236082b3 Mon Sep 17 00:00:00 2001 From: Shubham Gupta Date: Mon, 9 Oct 2023 18:49:46 +0530 Subject: [PATCH 57/67] launch containers in dev mode in integration tests --- tox.ini | 6 +++--- 1 file changed, 3 insertions(+), 3 
deletions(-) diff --git a/tox.ini b/tox.ini index c0c583dc43f..73ed01e3292 100644 --- a/tox.ini +++ b/tox.ini @@ -271,9 +271,9 @@ commands = bash -c "docker volume rm test-gateway-1_tailscale-data --force || true" bash -c "docker volume rm test-gateway-1_headscale-data --force || true" - bash -c 'HAGRID_ART=$HAGRID_ART hagrid launch test_gateway_1 network to docker:9081 $HAGRID_FLAGS --no-health-checks --verbose --no-warnings' - bash -c 'HAGRID_ART=$HAGRID_ART hagrid launch test_domain_1 domain to docker:9082 $HAGRID_FLAGS --no-health-checks --enable-signup --verbose --no-warnings' - bash -c 'HAGRID_ART=$HAGRID_ART hagrid launch test_domain_2 domain to docker:9083 --headless $HAGRID_FLAGS --enable-signup --no-health-checks --verbose --no-warnings' + bash -c 'HAGRID_ART=$HAGRID_ART hagrid launch test_gateway_1 network to docker:9081 $HAGRID_FLAGS --no-health-checks --verbose --no-warnings --dev' + bash -c 'HAGRID_ART=$HAGRID_ART hagrid launch test_domain_1 domain to docker:9082 $HAGRID_FLAGS --no-health-checks --enable-signup --verbose --no-warnings --dev' + bash -c 'HAGRID_ART=$HAGRID_ART hagrid launch test_domain_2 domain to docker:9083 --headless $HAGRID_FLAGS --enable-signup --no-health-checks --verbose --no-warnings --dev' ; wait for nodes to start docker ps From 37c799d7b2877f22da965938171b536b66e854cc Mon Sep 17 00:00:00 2001 From: Shubham Gupta Date: Tue, 10 Oct 2023 13:36:42 +0530 Subject: [PATCH 58/67] stage protocol in tox --- tox.ini | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/tox.ini b/tox.ini index 73ed01e3292..c475c764902 100644 --- a/tox.ini +++ b/tox.ini @@ -200,6 +200,8 @@ commands = bash -c "docker volume rm test-domain-1_credentials-data --force || true" bash -c "docker volume rm test-domain-1_seaweedfs-data --force || true" + python -c 'import syft as sy; sy.stage_protocol_changes()' + bash -c 'HAGRID_ART=$HAGRID_ART hagrid launch test_domain_1 domain to docker:9081 $HAGRID_FLAGS --enable-signup --no-health-checks --verbose 
--no-warnings' bash -c '(docker logs test_domain_1-frontend-1 -f &) | grep -q -E "Network:\s+https?://[a-zA-Z0-9.-]+:[0-9]+/" || true' @@ -271,6 +273,8 @@ commands = bash -c "docker volume rm test-gateway-1_tailscale-data --force || true" bash -c "docker volume rm test-gateway-1_headscale-data --force || true" + python -c 'import syft as sy; sy.stage_protocol_changes()' + bash -c 'HAGRID_ART=$HAGRID_ART hagrid launch test_gateway_1 network to docker:9081 $HAGRID_FLAGS --no-health-checks --verbose --no-warnings --dev' bash -c 'HAGRID_ART=$HAGRID_ART hagrid launch test_domain_1 domain to docker:9082 $HAGRID_FLAGS --no-health-checks --enable-signup --verbose --no-warnings --dev' bash -c 'HAGRID_ART=$HAGRID_ART hagrid launch test_domain_2 domain to docker:9083 --headless $HAGRID_FLAGS --enable-signup --no-health-checks --verbose --no-warnings --dev' @@ -404,6 +408,8 @@ commands = # Starting FastAPI server locally bash -c 'cd ~/.syft/syft-enclave/src && uvicorn app:app --host 0.0.0.0 --port $LOCAL_ENCLAVE_PORT > /dev/null 2>&1 &' + python -c 'import syft as sy; sy.stage_protocol_changes()' + bash -c 'cd tests/integration/external/oblv && pytest -p no:randomly -vvvv' bash -c 'chmod +x scripts/kill_process_in_port.sh && ./scripts/kill_process_in_port.sh $LOCAL_ENCLAVE_PORT' @@ -535,6 +541,7 @@ setenv = ORCHESTRA_DEPLOYMENT_TYPE = {env:ORCHESTRA_DEPLOYMENT_TYPE:podman} NODE_PORT = {env:NODE_PORT:8080} commands = + python -c 'import syft as sy; sy.stage_protocol_changes()' bash -c "podman pod rm --force --all || true"; bash -c "podman system prune --volumes --force || true"; bash -c "podman volume rm $(podman volume ls -q)||true"; @@ -604,6 +611,7 @@ setenv = ORCHESTRA_DEPLOYMENT_TYPE = {env:ORCHESTRA_DEPLOYMENT_TYPE:k8s} NODE_PORT = {env:NODE_PORT:9082} commands = + python -c 'import syft as sy; sy.stage_protocol_changes()' k3d version ; bash -c "docker rm $(docker ps -aq) --force || true" From 0d640becd3c5efb924558f3de073123789038fd3 Mon Sep 17 00:00:00 2001 From: Shubham 
Gupta Date: Tue, 10 Oct 2023 14:01:25 +0530 Subject: [PATCH 59/67] remove unused references --- tox.ini | 4 ---- 1 file changed, 4 deletions(-) diff --git a/tox.ini b/tox.ini index c475c764902..3f28bf1f28d 100644 --- a/tox.ini +++ b/tox.ini @@ -200,8 +200,6 @@ commands = bash -c "docker volume rm test-domain-1_credentials-data --force || true" bash -c "docker volume rm test-domain-1_seaweedfs-data --force || true" - python -c 'import syft as sy; sy.stage_protocol_changes()' - bash -c 'HAGRID_ART=$HAGRID_ART hagrid launch test_domain_1 domain to docker:9081 $HAGRID_FLAGS --enable-signup --no-health-checks --verbose --no-warnings' bash -c '(docker logs test_domain_1-frontend-1 -f &) | grep -q -E "Network:\s+https?://[a-zA-Z0-9.-]+:[0-9]+/" || true' @@ -408,8 +406,6 @@ commands = # Starting FastAPI server locally bash -c 'cd ~/.syft/syft-enclave/src && uvicorn app:app --host 0.0.0.0 --port $LOCAL_ENCLAVE_PORT > /dev/null 2>&1 &' - python -c 'import syft as sy; sy.stage_protocol_changes()' - bash -c 'cd tests/integration/external/oblv && pytest -p no:randomly -vvvv' bash -c 'chmod +x scripts/kill_process_in_port.sh && ./scripts/kill_process_in_port.sh $LOCAL_ENCLAVE_PORT' From 656cb33050e28fbdcf7657d134e79d7364c62ab2 Mon Sep 17 00:00:00 2001 From: Shubham Gupta Date: Tue, 10 Oct 2023 19:42:50 +0530 Subject: [PATCH 60/67] handle wrapping annotations correctly if annotation is not of typing class --- packages/syft/src/syft/client/api.py | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/packages/syft/src/syft/client/api.py b/packages/syft/src/syft/client/api.py index c5916ca3a93..86bab84c558 100644 --- a/packages/syft/src/syft/client/api.py +++ b/packages/syft/src/syft/client/api.py @@ -16,6 +16,7 @@ from typing import Union from typing import _GenericAlias from typing import get_args +from typing import get_origin # third party from nacl.exceptions import BadSignatureError @@ -426,6 +427,7 @@ def downgrade_signature(signature: Signature, 
object_versions: Dict): def unwrap_and_migrate_annotation(annotation, object_versions): args = get_args(annotation) + origin = get_origin(annotation) if len(args) == 0: if ( isinstance(annotation, type) @@ -448,7 +450,14 @@ def unwrap_and_migrate_annotation(annotation, object_versions): migrated_annotation = unwrap_and_migrate_annotation(arg, object_versions) migrated_annotations.append(migrated_annotation) - return annotation.copy_with(tuple(migrated_annotations)) + migrated_annotations = tuple(migrated_annotations) + + if hasattr(annotation, "copy_with"): + return annotation.copy_with(migrated_annotations) + elif origin is not None: + return origin[migrated_annotations] + else: + return migrated_annotation[0] @instrument From 072fb08e1476e1eb5027e51dafaca30d1219173d Mon Sep 17 00:00:00 2001 From: Shubham Gupta Date: Wed, 11 Oct 2023 15:51:33 +0530 Subject: [PATCH 61/67] patch module of mock classes to syft.test in protocol communication test - add a utility to add a klass and module to syft module --- .../syft/service/network/network_service.py | 1 - packages/syft/src/syft/util/util.py | 10 ++++ .../migrations/protocol_communication_test.py | 60 +++++++++---------- 3 files changed, 38 insertions(+), 33 deletions(-) diff --git a/packages/syft/src/syft/service/network/network_service.py b/packages/syft/src/syft/service/network/network_service.py index ee8fe02843b..aa9a4b62c9c 100644 --- a/packages/syft/src/syft/service/network/network_service.py +++ b/packages/syft/src/syft/service/network/network_service.py @@ -503,7 +503,6 @@ def http_connection_to_node_route() -> List[Callable]: def get_python_node_route(context: TransformContext) -> TransformContext: context.output["id"] = context.obj.node.id - print("Store config....", context.obj.node.blob_store_config) context.output["worker_settings"] = WorkerSettings.from_node(context.obj.node) context.output["proxy_target_uid"] = context.obj.proxy_target_uid return context diff --git a/packages/syft/src/syft/util/util.py 
b/packages/syft/src/syft/util/util.py index 061a9ac9ad5..27f51c392c7 100644 --- a/packages/syft/src/syft/util/util.py +++ b/packages/syft/src/syft/util/util.py @@ -22,6 +22,7 @@ import sys import threading import time +import types from types import ModuleType from typing import Any from typing import Callable @@ -866,3 +867,12 @@ def get_interpreter_module() -> str: def thread_ident() -> int: return threading.current_thread().ident + + +def set_klass_module_to_syft(klass, module_name): + if module_name not in sys.modules["syft"].__dict__: + new_module = types.ModuleType(module_name) + else: + new_module = sys.modules["syft"].__dict__[module_name] + setattr(new_module, klass.__name__, klass) + sys.modules["syft"].__dict__[module_name] = new_module diff --git a/packages/syft/tests/syft/migrations/protocol_communication_test.py b/packages/syft/tests/syft/migrations/protocol_communication_test.py index 88e48ad5098..6ab7ee1c73b 100644 --- a/packages/syft/tests/syft/migrations/protocol_communication_test.py +++ b/packages/syft/tests/syft/migrations/protocol_communication_test.py @@ -2,7 +2,6 @@ from typing import List from typing import Type from typing import Union -from unittest import mock # syft absolute import syft as sy @@ -25,7 +24,7 @@ from syft.types.transforms import convert_types from syft.types.transforms import rename from syft.types.uid import UID -from syft.util.util import index_syft_by_module_name +from syft.util.util import set_klass_module_to_syft def get_klass_version_1(): @@ -37,7 +36,9 @@ class SyftMockObjectTestV1(SyftObject): id: UID name: str version: int + __module__: str = "syft.test" + set_klass_module_to_syft(SyftMockObjectTestV1, module_name="test") return SyftMockObjectTestV1 @@ -50,7 +51,9 @@ class SyftMockObjectTestV2(SyftObject): id: UID full_name: str version: str + __module__: str = "syft.test" + set_klass_module_to_syft(SyftMockObjectTestV2, module_name="test") return SyftMockObjectTestV2 @@ -73,10 +76,12 @@ class 
SyftMockObjectStash(BaseStash): name=object_type.__canonical_name__, object_type=syft_object, ) + __module__: str = "syft.test" def __init__(self, store: DocumentStore) -> None: super().__init__(store=store) + set_klass_module_to_syft(SyftMockObjectStash, module_name="test") return SyftMockObjectStash @@ -87,6 +92,7 @@ def setup_service_method(syft_object): class SyftMockObjectService(AbstractService): store: DocumentStore stash: stash_klass + __module__: str = "syft.test" def __init__(self, store: DocumentStore) -> None: self.store = store @@ -105,6 +111,7 @@ def get( return result.ok() return SyftError(message=f"{result.err()}") + set_klass_module_to_syft(SyftMockObjectService, module_name="test") return SyftMockObjectService @@ -177,7 +184,9 @@ def test_client_server_running_different_protocols(): assert isinstance(sample_data, klass_v2) # Validate migrations - sample_data_v1 = sample_data.migrate_to(version=protocol_version_with_mock_obj_v1) + sample_data_v1 = sample_data.migrate_to( + version=protocol_version_with_mock_obj_v1, + ) assert sample_data_v1.name == sample_data.full_name assert sample_data_v1.version == int(sample_data.version) @@ -188,32 +197,19 @@ def test_client_server_running_different_protocols(): sample_data, ) - # patch the index syft module function - def patched_index_syft_by_module_name(fully_qualified_name: str) -> object: - if klass_v1.__name__ in fully_qualified_name: - return klass_v1 - elif klass_v2.__name__ in fully_qualified_name: - return klass_v2 - - return index_syft_by_module_name(fully_qualified_name) - - with mock.patch( - "syft.client.api.index_syft_by_module_name", - patched_index_syft_by_module_name, - ): - nh2_client = nh2.client - assert nh2_client is not None - # Force communication protocol to when version object is defined - nh2_client.communication_protocol = protocol_version_with_mock_obj_v1 - # Reset api - nh2_client._api = None - - # Call the API with an older communication protocol version - result2 = 
nh2_client.api.services.dummy.get() - assert isinstance(result2, list) - - # Validate the data received - for data in result2: - assert isinstance(data, klass_v1) - assert data.name == sample_data.full_name - assert data.version == int(sample_data.version) + nh2_client = nh2.client + assert nh2_client is not None + # Force communication protocol to when version object is defined + nh2_client.communication_protocol = protocol_version_with_mock_obj_v1 + # Reset api + nh2_client._api = None + + # Call the API with an older communication protocol version + result2 = nh2_client.api.services.dummy.get() + assert isinstance(result2, list) + + # Validate the data received + for data in result2: + assert isinstance(data, klass_v1) + assert data.name == sample_data.full_name + assert data.version == int(sample_data.version) From 5936ee37e0ac8b093730323c36e30a1f1d9030b5 Mon Sep 17 00:00:00 2001 From: Shubham Gupta Date: Wed, 11 Oct 2023 17:24:52 +0530 Subject: [PATCH 62/67] mock patch TYPE BANK for protocol communication test - consider action object and its subclasses for data migration as well --- packages/syft/src/syft/node/node.py | 24 +++- .../migrations/protocol_communication_test.py | 116 ++++++++++-------- 2 files changed, 83 insertions(+), 57 deletions(-) diff --git a/packages/syft/src/syft/node/node.py b/packages/syft/src/syft/node/node.py index 13e2d4dfd75..eb0e9e7d7a3 100644 --- a/packages/syft/src/syft/node/node.py +++ b/packages/syft/src/syft/node/node.py @@ -47,6 +47,8 @@ from ..protocol.data_protocol import get_data_protocol from ..serde.deserialize import _deserialize from ..serde.serialize import _serialize +from ..service.action.action_object import Action +from ..service.action.action_object import ActionObject from ..service.action.action_service import ActionService from ..service.action.action_store import DictActionStore from ..service.action.action_store import SQLiteActionStore @@ -456,7 +458,7 @@ def root_client(self): return root_client def 
_find_pending_migrations(self): - partition_to_be_migrated = [] + klasses_to_be_migrated = [] context = AuthedServiceContext( node=self, @@ -465,24 +467,38 @@ def _find_pending_migrations(self): ) migration_state_service = self.get_service(MigrateStateService) + canonical_name_version_map = [] + + # Track all object types from document store for partition in self.document_store.partitions.values(): object_type = partition.settings.object_type canonical_name = object_type.__canonical_name__ + object_version = object_type.__version__ + canonical_name_version_map.append((canonical_name, object_version)) + + # Track all object types from action store + action_object_types = [Action, ActionObject] + action_object_types.extend(ActionObject.__subclasses__()) + for object_type in action_object_types: + canonical_name = object_type.__canonical_name__ + object_version = object_type.__version__ + canonical_name_version_map.append((canonical_name, object_version)) + for canonical_name, current_version in canonical_name_version_map: migration_state = migration_state_service.get_state(context, canonical_name) if ( migration_state is not None and migration_state.current_version != migration_state.latest_version ): - partition_to_be_migrated.append(canonical_name) + klasses_to_be_migrated.append(canonical_name) else: migration_state_service.register_migration_state( context, - current_version=object_type.__version__, + current_version=current_version, canonical_name=canonical_name, ) - return partition_to_be_migrated + return klasses_to_be_migrated @property def guest_client(self): diff --git a/packages/syft/tests/syft/migrations/protocol_communication_test.py b/packages/syft/tests/syft/migrations/protocol_communication_test.py index 6ab7ee1c73b..f480ce76adb 100644 --- a/packages/syft/tests/syft/migrations/protocol_communication_test.py +++ b/packages/syft/tests/syft/migrations/protocol_communication_test.py @@ -1,12 +1,15 @@ # stdlib +from copy import deepcopy from typing import 
List from typing import Type from typing import Union +from unittest import mock # syft absolute import syft as sy from syft.node.worker import Worker from syft.protocol.data_protocol import get_data_protocol +from syft.serde.recursive import TYPE_BANK from syft.serde.serializable import serializable from syft.service.context import AuthedServiceContext from syft.service.response import SyftError @@ -26,6 +29,8 @@ from syft.types.uid import UID from syft.util.util import set_klass_module_to_syft +MY_TEST_TYPE_BANK = deepcopy(TYPE_BANK) + def get_klass_version_1(): @serializable() @@ -70,6 +75,7 @@ def mock_v2_to_v1(): def get_stash_klass(syft_object: Type[SyftBaseObject]): + @serializable() class SyftMockObjectStash(BaseStash): object_type = syft_object settings: PartitionSettings = PartitionSettings( @@ -117,7 +123,6 @@ def get( def setup_version_one(node_name: str): syft_klass_version_one = get_klass_version_1() - sy.stage_protocol_changes() sy.bump_protocol_version() @@ -161,55 +166,60 @@ def setup_version_second(node_name: str, klass_version_one: type): def test_client_server_running_different_protocols(): node_name = UID().to_string() - # Setup mock object version one - nh1, klass_v1 = setup_version_one(node_name) - assert klass_v1.__canonical_name__ == "SyftMockObjectTest" - assert klass_v1.__name__ == "SyftMockObjectTestV1" - - nh1_client = nh1.client - assert nh1_client is not None - result_from_client_1 = nh1_client.api.services.dummy.get() - - protocol_version_with_mock_obj_v1 = get_data_protocol().latest_version - - # No data saved - assert len(result_from_client_1) == 0 - - # Setup mock object version second - nh2, klass_v2 = setup_version_second(node_name, klass_version_one=klass_v1) - - # Create a sample data in version second - sample_data = klass_v2(full_name="John", version=str(1), id=UID()) - - assert isinstance(sample_data, klass_v2) - - # Validate migrations - sample_data_v1 = sample_data.migrate_to( - version=protocol_version_with_mock_obj_v1, 
- ) - assert sample_data_v1.name == sample_data.full_name - assert sample_data_v1.version == int(sample_data.version) - - # Set the sample data in version second - service_klass = nh1.python_node.get_service("SyftMockObjectService") - service_klass.stash.set( - nh1.python_node.root_client.verify_key, - sample_data, - ) - - nh2_client = nh2.client - assert nh2_client is not None - # Force communication protocol to when version object is defined - nh2_client.communication_protocol = protocol_version_with_mock_obj_v1 - # Reset api - nh2_client._api = None - - # Call the API with an older communication protocol version - result2 = nh2_client.api.services.dummy.get() - assert isinstance(result2, list) - - # Validate the data received - for data in result2: - assert isinstance(data, klass_v1) - assert data.name == sample_data.full_name - assert data.version == int(sample_data.version) + with mock.patch("syft.serde.recursive.TYPE_BANK", MY_TEST_TYPE_BANK): + with mock.patch( + "syft.protocol.data_protocol.TYPE_BANK", + MY_TEST_TYPE_BANK, + ): + # Setup mock object version one + nh1, klass_v1 = setup_version_one(node_name) + assert klass_v1.__canonical_name__ == "SyftMockObjectTest" + assert klass_v1.__name__ == "SyftMockObjectTestV1" + + nh1_client = nh1.client + assert nh1_client is not None + result_from_client_1 = nh1_client.api.services.dummy.get() + + protocol_version_with_mock_obj_v1 = get_data_protocol().latest_version + + # No data saved + assert len(result_from_client_1) == 0 + + # Setup mock object version second + nh2, klass_v2 = setup_version_second(node_name, klass_version_one=klass_v1) + + # Create a sample data in version second + sample_data = klass_v2(full_name="John", version=str(1), id=UID()) + + assert isinstance(sample_data, klass_v2) + + # Validate migrations + sample_data_v1 = sample_data.migrate_to( + version=protocol_version_with_mock_obj_v1, + ) + assert sample_data_v1.name == sample_data.full_name + assert sample_data_v1.version == 
int(sample_data.version) + + # Set the sample data in version second + service_klass = nh1.python_node.get_service("SyftMockObjectService") + service_klass.stash.set( + nh1.python_node.root_client.verify_key, + sample_data, + ) + + nh2_client = nh2.client + assert nh2_client is not None + # Force communication protocol to when version object is defined + nh2_client.communication_protocol = protocol_version_with_mock_obj_v1 + # Reset api + nh2_client._api = None + + # Call the API with an older communication protocol version + result2 = nh2_client.api.services.dummy.get() + assert isinstance(result2, list) + + # Validate the data received + for data in result2: + assert isinstance(data, klass_v1) + assert data.name == sample_data.full_name + assert data.version == int(sample_data.version) From 1a4a76e6d8caafb7ebc06d1f8ea3c4258a4da341 Mon Sep 17 00:00:00 2001 From: Shubham Gupta Date: Thu, 12 Oct 2023 01:05:18 +0530 Subject: [PATCH 63/67] revert comm protocol test to patch index_syft_module_by_name - bump protocol version in stage_protocol fixture - handle syft base object in migration registry --- .../syft/src/syft/protocol/data_protocol.py | 2 +- packages/syft/src/syft/types/syft_object.py | 39 ++++-- packages/syft/tests/conftest.py | 4 +- .../migrations/protocol_communication_test.py | 129 +++++++++--------- 4 files changed, 99 insertions(+), 75 deletions(-) diff --git a/packages/syft/src/syft/protocol/data_protocol.py b/packages/syft/src/syft/protocol/data_protocol.py index cdc688df946..ca8e98e8cb3 100644 --- a/packages/syft/src/syft/protocol/data_protocol.py +++ b/packages/syft/src/syft/protocol/data_protocol.py @@ -115,7 +115,7 @@ def build_state(self, stop_key: Optional[str] = None) -> dict: or hash_str not in state_versions.values() ): raise Exception( - f"Can't remove {object_metadata} missing from state {versions}" + f"Can't remove {object_metadata} missing from state {versions} for object {canonical_name}." 
) if action == "add": state_dict[canonical_name][str(version)] = hash_str diff --git a/packages/syft/src/syft/types/syft_object.py b/packages/syft/src/syft/types/syft_object.py index 38a4c33fa4c..49f89f4f99d 100644 --- a/packages/syft/src/syft/types/syft_object.py +++ b/packages/syft/src/syft/types/syft_object.py @@ -263,12 +263,18 @@ def get_migration( cls, type_from: Type[SyftBaseObject], type_to: Type[SyftBaseObject] ) -> Callable: for type_from_mro in type_from.mro(): - if issubclass(type_from_mro, SyftBaseObject): + if ( + issubclass(type_from_mro, SyftBaseObject) + and type_from_mro != SyftBaseObject + ): klass_from = type_from_mro.__canonical_name__ version_from = type_from_mro.__version__ for type_to_mro in type_to.mro(): - if issubclass(type_to_mro, SyftBaseObject): + if ( + issubclass(type_to_mro, SyftBaseObject) + and type_to_mro != SyftBaseObject + ): klass_to = type_to_mro.__canonical_name__ version_to = type_to_mro.__version__ @@ -286,12 +292,23 @@ def get_migration( def get_migration_for_version( cls, type_from: Type[SyftBaseObject], version_to: int ) -> Callable: + canonical_name = type_from.__canonical_name__ for type_from_mro in type_from.mro(): - if issubclass(type_from_mro, SyftBaseObject): + if ( + issubclass(type_from_mro, SyftBaseObject) + and type_from_mro != SyftBaseObject + ): klass_from = type_from_mro.__canonical_name__ + if klass_from != canonical_name: + continue version_from = type_from_mro.__version__ mapping_string = f"{version_from}x{version_to}" - if mapping_string in cls.__migration_transform_registry__[klass_from]: + if ( + mapping_string + in cls.__migration_transform_registry__[ + type_from.__canonical_name__ + ] + ): return cls.__migration_transform_registry__[klass_from][ mapping_string ] @@ -582,17 +599,15 @@ def _syft_searchable_keys_dict(cls) -> Dict[str, type]: return cls._syft_keys_types_dict("__attr_searchable__") def migrate_to(self, version: int, context: Optional[Context] = None) -> Any: - migration_transform = 
SyftMigrationRegistry.get_migration_for_version( - type_from=type(self), version_to=version - ) - return ( - migration_transform( + if self.__version__ != version: + migration_transform = SyftMigrationRegistry.get_migration_for_version( + type_from=type(self), version_to=version + ) + return migration_transform( self, context, ) - if self.__version__ != version - else self - ) + return self def short_qual_name(name: str) -> str: diff --git a/packages/syft/tests/conftest.py b/packages/syft/tests/conftest.py index f403159b8a3..49d19b1049f 100644 --- a/packages/syft/tests/conftest.py +++ b/packages/syft/tests/conftest.py @@ -9,6 +9,7 @@ # syft absolute import syft as sy +from syft.protocol.data_protocol import bump_protocol_version from syft.protocol.data_protocol import get_data_protocol from syft.protocol.data_protocol import stage_protocol_changes @@ -61,7 +62,8 @@ def stage_protocol(protocol_file: Path): ): dp = get_data_protocol() stage_protocol_changes() - yield + bump_protocol_version() + yield dp.protocol_history dp.save_history(dp.protocol_history) diff --git a/packages/syft/tests/syft/migrations/protocol_communication_test.py b/packages/syft/tests/syft/migrations/protocol_communication_test.py index f480ce76adb..4e44727395a 100644 --- a/packages/syft/tests/syft/migrations/protocol_communication_test.py +++ b/packages/syft/tests/syft/migrations/protocol_communication_test.py @@ -27,7 +27,7 @@ from syft.types.transforms import convert_types from syft.types.transforms import rename from syft.types.uid import UID -from syft.util.util import set_klass_module_to_syft +from syft.util.util import index_syft_by_module_name MY_TEST_TYPE_BANK = deepcopy(TYPE_BANK) @@ -41,9 +41,7 @@ class SyftMockObjectTestV1(SyftObject): id: UID name: str version: int - __module__: str = "syft.test" - set_klass_module_to_syft(SyftMockObjectTestV1, module_name="test") return SyftMockObjectTestV1 @@ -56,9 +54,7 @@ class SyftMockObjectTestV2(SyftObject): id: UID full_name: str version: 
str - __module__: str = "syft.test" - set_klass_module_to_syft(SyftMockObjectTestV2, module_name="test") return SyftMockObjectTestV2 @@ -82,12 +78,10 @@ class SyftMockObjectStash(BaseStash): name=object_type.__canonical_name__, object_type=syft_object, ) - __module__: str = "syft.test" def __init__(self, store: DocumentStore) -> None: super().__init__(store=store) - set_klass_module_to_syft(SyftMockObjectStash, module_name="test") return SyftMockObjectStash @@ -117,7 +111,6 @@ def get( return result.ok() return SyftError(message=f"{result.err()}") - set_klass_module_to_syft(SyftMockObjectService, module_name="test") return SyftMockObjectService @@ -163,7 +156,15 @@ def setup_version_second(node_name: str, klass_version_one: type): return node, syft_klass_version_second -def test_client_server_running_different_protocols(): +def test_client_server_running_different_protocols(stage_protocol): + def patched_index_syft_by_module_name(fully_qualified_name: str): + if klass_v1.__name__ in fully_qualified_name: + return klass_v1 + elif klass_v2.__name__ in fully_qualified_name: + return klass_v2 + + return index_syft_by_module_name(fully_qualified_name) + node_name = UID().to_string() with mock.patch("syft.serde.recursive.TYPE_BANK", MY_TEST_TYPE_BANK): @@ -171,55 +172,61 @@ def test_client_server_running_different_protocols(): "syft.protocol.data_protocol.TYPE_BANK", MY_TEST_TYPE_BANK, ): - # Setup mock object version one - nh1, klass_v1 = setup_version_one(node_name) - assert klass_v1.__canonical_name__ == "SyftMockObjectTest" - assert klass_v1.__name__ == "SyftMockObjectTestV1" - - nh1_client = nh1.client - assert nh1_client is not None - result_from_client_1 = nh1_client.api.services.dummy.get() - - protocol_version_with_mock_obj_v1 = get_data_protocol().latest_version - - # No data saved - assert len(result_from_client_1) == 0 - - # Setup mock object version second - nh2, klass_v2 = setup_version_second(node_name, klass_version_one=klass_v1) - - # Create a sample 
data in version second - sample_data = klass_v2(full_name="John", version=str(1), id=UID()) - - assert isinstance(sample_data, klass_v2) - - # Validate migrations - sample_data_v1 = sample_data.migrate_to( - version=protocol_version_with_mock_obj_v1, - ) - assert sample_data_v1.name == sample_data.full_name - assert sample_data_v1.version == int(sample_data.version) - - # Set the sample data in version second - service_klass = nh1.python_node.get_service("SyftMockObjectService") - service_klass.stash.set( - nh1.python_node.root_client.verify_key, - sample_data, - ) - - nh2_client = nh2.client - assert nh2_client is not None - # Force communication protocol to when version object is defined - nh2_client.communication_protocol = protocol_version_with_mock_obj_v1 - # Reset api - nh2_client._api = None - - # Call the API with an older communication protocol version - result2 = nh2_client.api.services.dummy.get() - assert isinstance(result2, list) - - # Validate the data received - for data in result2: - assert isinstance(data, klass_v1) - assert data.name == sample_data.full_name - assert data.version == int(sample_data.version) + with mock.patch( + "syft.client.api.index_syft_by_module_name", + patched_index_syft_by_module_name, + ): + # Setup mock object version one + nh1, klass_v1 = setup_version_one(node_name) + assert klass_v1.__canonical_name__ == "SyftMockObjectTest" + assert klass_v1.__name__ == "SyftMockObjectTestV1" + + nh1_client = nh1.client + assert nh1_client is not None + result_from_client_1 = nh1_client.api.services.dummy.get() + + protocol_version_with_mock_obj_v1 = get_data_protocol().latest_version + + # No data saved + assert len(result_from_client_1) == 0 + + # Setup mock object version second + nh2, klass_v2 = setup_version_second( + node_name, klass_version_one=klass_v1 + ) + + # Create a sample data in version second + sample_data = klass_v2(full_name="John", version=str(1), id=UID()) + + assert isinstance(sample_data, klass_v2) + + # Validate 
migrations + sample_data_v1 = sample_data.migrate_to( + version=klass_v1.__version__, + ) + assert sample_data_v1.name == sample_data.full_name + assert sample_data_v1.version == int(sample_data.version) + + # Set the sample data in version second + service_klass = nh1.python_node.get_service("SyftMockObjectService") + service_klass.stash.set( + nh1.python_node.root_client.verify_key, + sample_data, + ) + + nh2_client = nh2.client + assert nh2_client is not None + # Force communication protocol to when version object is defined + nh2_client.communication_protocol = protocol_version_with_mock_obj_v1 + # Reset api + nh2_client._api = None + + # Call the API with an older communication protocol version + result2 = nh2_client.api.services.dummy.get() + assert isinstance(result2, list) + + # Validate the data received + for data in result2: + assert isinstance(data, klass_v1) + assert data.name == sample_data.full_name + assert data.version == int(sample_data.version) From c593ef3d577a4fe827dd33702aad8b71e59d8b1a Mon Sep 17 00:00:00 2001 From: Shubham Gupta Date: Thu, 12 Oct 2023 10:14:17 +0530 Subject: [PATCH 64/67] remove mock service config from ServiceConfigRegistry at end of test --- .../tests/syft/migrations/protocol_communication_test.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/packages/syft/tests/syft/migrations/protocol_communication_test.py b/packages/syft/tests/syft/migrations/protocol_communication_test.py index 4e44727395a..254a2033873 100644 --- a/packages/syft/tests/syft/migrations/protocol_communication_test.py +++ b/packages/syft/tests/syft/migrations/protocol_communication_test.py @@ -14,6 +14,7 @@ from syft.service.context import AuthedServiceContext from syft.service.response import SyftError from syft.service.service import AbstractService +from syft.service.service import ServiceConfigRegistry from syft.service.service import service_method from syft.service.user.user_roles import GUEST_ROLE_LEVEL from 
syft.store.document_store import BaseStash @@ -29,7 +30,7 @@ from syft.types.uid import UID from syft.util.util import index_syft_by_module_name -MY_TEST_TYPE_BANK = deepcopy(TYPE_BANK) +MOCK_TYPE_BANK = deepcopy(TYPE_BANK) def get_klass_version_1(): @@ -166,11 +167,10 @@ def patched_index_syft_by_module_name(fully_qualified_name: str): return index_syft_by_module_name(fully_qualified_name) node_name = UID().to_string() - - with mock.patch("syft.serde.recursive.TYPE_BANK", MY_TEST_TYPE_BANK): + with mock.patch("syft.serde.recursive.TYPE_BANK", MOCK_TYPE_BANK): with mock.patch( "syft.protocol.data_protocol.TYPE_BANK", - MY_TEST_TYPE_BANK, + MOCK_TYPE_BANK, ): with mock.patch( "syft.client.api.index_syft_by_module_name", @@ -230,3 +230,4 @@ def patched_index_syft_by_module_name(fully_qualified_name: str): assert isinstance(data, klass_v1) assert data.name == sample_data.full_name assert data.version == int(sample_data.version) + ServiceConfigRegistry.__service_config_registry__.pop("dummy.syft_object", None) From 62e358b95e14e8dae331bc14898226f6ed8fdb5b Mon Sep 17 00:00:00 2001 From: Madhava Jay Date: Thu, 12 Oct 2023 17:37:32 +1000 Subject: [PATCH 65/67] Added check_or_stage_protocol and tox task for check and bumping - Added check and bump protocol to CI --- .github/workflows/cd-syft.yml | 7 ++++++- ...nup-notebooks.yml => post-merge-tasks.yml} | 13 +++++++++---- .github/workflows/pr-tests-linting.yml | 4 ++++ packages/syft/src/syft/__init__.py | 1 + .../syft/src/syft/protocol/data_protocol.py | 17 +++++++++++++++++ tox.ini | 19 ++++++++++++++++++- 6 files changed, 55 insertions(+), 6 deletions(-) rename .github/workflows/{post-merge-cleanup-notebooks.yml => post-merge-tasks.yml} (63%) diff --git a/.github/workflows/cd-syft.yml b/.github/workflows/cd-syft.yml index f57a95a5f44..78cc8c2e5c5 100644 --- a/.github/workflows/cd-syft.yml +++ b/.github/workflows/cd-syft.yml @@ -86,13 +86,18 @@ jobs: tox -e syft.build.helm tox -e syft.package.helm + - name: Check and Bump 
Protocol Version + if: github.event_name == 'schedule' + run: | + BUMP=True tox -e syft.protocol.check + - name: Commit changes to Syft uses: EndBug/add-and-commit@v9 with: author_name: ${{ secrets.OM_BOT_NAME }} author_email: ${{ secrets.OM_BOT_EMAIL }} message: "[syft]bump version" - add: "['.bumpversion.cfg', 'VERSION', 'packages/grid/VERSION', 'packages/grid/devspace.yaml', 'packages/syft/src/syft/VERSION', 'packages/syft/setup.cfg', 'packages/grid/frontend/package.json', 'packages/syft/src/syft/__init__.py', 'packages/hagrid/hagrid/manifest_template.yml', 'packages/grid/helm/syft/Chart.yaml', 'packages/grid/helm/repo', 'packages/hagrid/hagrid/deps.py', 'packages/grid/podman/podman-kube/podman-syft-kube.yaml' , 'packages/syftcli/manifest.yml']" + add: "['.bumpversion.cfg', 'VERSION', 'packages/grid/VERSION', 'packages/grid/devspace.yaml', 'packages/syft/src/syft/VERSION', 'packages/syft/setup.cfg', 'packages/grid/frontend/package.json', 'packages/syft/src/syft/__init__.py', 'packages/hagrid/hagrid/manifest_template.yml', 'packages/grid/helm/syft/Chart.yaml', 'packages/grid/helm/repo', 'packages/hagrid/hagrid/deps.py', 'packages/grid/podman/podman-kube/podman-syft-kube.yaml' , 'packages/syftcli/manifest.yml', 'packages/syft/src/syft/protocol/protocol_version.json']" - name: Scheduled Build and Publish if: github.event_name == 'schedule' diff --git a/.github/workflows/post-merge-cleanup-notebooks.yml b/.github/workflows/post-merge-tasks.yml similarity index 63% rename from .github/workflows/post-merge-cleanup-notebooks.yml rename to .github/workflows/post-merge-tasks.yml index 8b7f07ae4c3..860b71b1442 100644 --- a/.github/workflows/post-merge-cleanup-notebooks.yml +++ b/.github/workflows/post-merge-tasks.yml @@ -1,4 +1,4 @@ -name: Post Merge - Cleanup Notebooks +name: Post Merge Tasks on: workflow_call: @@ -7,7 +7,6 @@ on: branches: - dev - main - - "0.8" jobs: post-merge-cleanup-notebooks: @@ -22,11 +21,17 @@ jobs: with: python-version: ${{ matrix.python-version 
}} - - name: Commit changes to remove notebooks + - name: Check and Bump Protocol Version + if: github.event_name == 'schedule' + run: | + tox -e syft.protocol.check + + - name: Commit changes to bump protocol and remove notebooks uses: EndBug/add-and-commit@v9 with: author_name: ${{ secrets.OM_BOT_NAME }} author_email: ${{ secrets.OM_BOT_EMAIL }} - message: "cleanup notebooks" + message: "bump protocol and remove notebooks" remove: "-r notebooks/Experimental/" + add: "['packages/syft/src/syft/protocol/protocol_version.json']" commit: "-a" diff --git a/.github/workflows/pr-tests-linting.yml b/.github/workflows/pr-tests-linting.yml index 99dda76878c..432af92c9dd 100644 --- a/.github/workflows/pr-tests-linting.yml +++ b/.github/workflows/pr-tests-linting.yml @@ -47,3 +47,7 @@ jobs: ${{ runner.os }}-pip-py${{ matrix.python-version }}- - uses: pre-commit/action@v3.0.0 + + - name: Check Protocol Version + run: | + tox -e syft.protocol.check diff --git a/packages/syft/src/syft/__init__.py b/packages/syft/src/syft/__init__.py index cb5cc5d0c90..947b4359a2f 100644 --- a/packages/syft/src/syft/__init__.py +++ b/packages/syft/src/syft/__init__.py @@ -33,6 +33,7 @@ from .node.server import serve_node as bind_worker # noqa: F401 from .node.worker import Worker # noqa: F401 from .protocol.data_protocol import bump_protocol_version # noqa: F401 +from .protocol.data_protocol import check_or_stage_protocol # noqa: F401 from .protocol.data_protocol import get_data_protocol # noqa: F401 from .protocol.data_protocol import stage_protocol_changes # noqa: F401 from .serde import NOTHING # noqa: F401 diff --git a/packages/syft/src/syft/protocol/data_protocol.py b/packages/syft/src/syft/protocol/data_protocol.py index ca8e98e8cb3..aaa90f41c77 100644 --- a/packages/syft/src/syft/protocol/data_protocol.py +++ b/packages/syft/src/syft/protocol/data_protocol.py @@ -249,6 +249,18 @@ def bump_protocol_version(self) -> Result[SyftSuccess, SyftError]: self.load_state() return 
SyftSuccess(message=f"Protocol Updated to {next_highest_protocol}") + def check_protocol(self) -> Result[SyftSuccess, SyftError]: + if len(self.diff) != 0: + return SyftError(message="Protocol Changes Unstanged") + else: + return SyftSuccess(message="Protocol Stable") + + def check_or_stage_protocol(self) -> Result[SyftSuccess, SyftError]: + if not self.check_protocol(): + self.stage_protocol_changes() + result = self.check_protocol() + return result + @property def supported_protocols(self) -> list[Union[int, str]]: """Returns a list of protocol numbers that are marked as supported.""" @@ -297,6 +309,11 @@ def bump_protocol_version() -> Result[SyftSuccess, SyftError]: return data_protocol.bump_protocol_version() +def check_or_stage_protocol() -> Result[SyftSuccess, SyftError]: + data_protocol = get_data_protocol() + return data_protocol.check_or_stage_protocol() + + def debox_arg_and_migrate(arg: Any, protocol_state: dict): """Debox the argument based on whether it is iterable or single entity.""" box_to_result_type = None diff --git a/tox.ini b/tox.ini index 2b673ae183a..5ae5f7c8f3c 100644 --- a/tox.ini +++ b/tox.ini @@ -20,6 +20,7 @@ envlist = syft.build.helm syft.package.helm syft.test.helm + syft.protocol.check syftcli.test.unit syftcli.publish syftcli.build @@ -344,6 +345,23 @@ commands = pip install jupyter jupyterlab --upgrade jupyter lab --ip 0.0.0.0 --ServerApp.token={posargs} +[testenv:syft.protocol.check] +description = Syft Protocol Check +deps = + {[testenv:syft]deps} +changedir = {toxinidir}/packages/syft +allowlist_externals = + bash + +setenv = + BUMP = {env:BUMP:False} +commands = + bash -c "echo Using BUMP=${BUMP}" + python -c 'import syft as sy; sy.check_or_stage_protocol()' + bash -c 'if [[ "$BUMP" != "False" ]]; then \ + python -c "import syft as sy; sy.bump_protocol_version()"; \ + fi' + [testenv:syft.publish] changedir = {toxinidir}/packages/syft description = Build and Publish Syft Wheel @@ -353,7 +371,6 @@ commands = python -c 'from shutil 
import rmtree; rmtree("build", True); rmtree("dist", True)' python -m build . - [testenv:syft.test.security] description = Security Checks for Syft changedir = {toxinidir}/packages/syft From a6e499d503c9e19529edd47866dad755ef9ca460 Mon Sep 17 00:00:00 2001 From: Madhava Jay Date: Thu, 12 Oct 2023 17:42:03 +1000 Subject: [PATCH 66/67] Added tox to linting task --- .github/workflows/pr-tests-linting.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/pr-tests-linting.yml b/.github/workflows/pr-tests-linting.yml index 432af92c9dd..41ec391eff9 100644 --- a/.github/workflows/pr-tests-linting.yml +++ b/.github/workflows/pr-tests-linting.yml @@ -27,9 +27,9 @@ jobs: with: python-version: ${{ matrix.python-version }} - - name: Upgrade pip + - name: Install pip packages run: | - python -m pip install --upgrade --user pip + python -m pip install --upgrade --user pip tox - name: Get pip cache dir id: pip-cache From 7a3f90daae37b6caaee074ffa5de91b6f27d402d Mon Sep 17 00:00:00 2001 From: Shubham Gupta Date: Thu, 12 Oct 2023 13:28:32 +0530 Subject: [PATCH 67/67] skip bump in case of beta releases --- .github/workflows/cd-syft.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/cd-syft.yml b/.github/workflows/cd-syft.yml index 78cc8c2e5c5..23436403538 100644 --- a/.github/workflows/cd-syft.yml +++ b/.github/workflows/cd-syft.yml @@ -89,7 +89,7 @@ jobs: - name: Check and Bump Protocol Version if: github.event_name == 'schedule' run: | - BUMP=True tox -e syft.protocol.check + tox -e syft.protocol.check - name: Commit changes to Syft uses: EndBug/add-and-commit@v9