From fe8b57f1d3c98508a55e38e1d4bdfd5423889326 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Mon, 12 Aug 2024 12:08:09 +0200 Subject: [PATCH 01/16] moved exceptions to single file --- client/ayon_core/pipeline/create/__init__.py | 16 ++- client/ayon_core/pipeline/create/context.py | 128 ++---------------- .../pipeline/create/creator_plugins.py | 10 -- .../ayon_core/pipeline/create/exceptions.py | 114 ++++++++++++++++ 4 files changed, 140 insertions(+), 128 deletions(-) create mode 100644 client/ayon_core/pipeline/create/exceptions.py diff --git a/client/ayon_core/pipeline/create/__init__.py b/client/ayon_core/pipeline/create/__init__.py index da9cafad5a..68e173d6b9 100644 --- a/client/ayon_core/pipeline/create/__init__.py +++ b/client/ayon_core/pipeline/create/__init__.py @@ -4,6 +4,20 @@ PRE_CREATE_THUMBNAIL_KEY, DEFAULT_VARIANT_VALUE, ) +from .exceptions import ( + UnavailableSharedData, + ImmutableKeyError, + HostMissRequiredMethod, + ConvertorsOperationFailed, + ConvertorsFindFailed, + ConvertorsConversionFailed, + CreatorError, + CreatorsCreateFailed, + CreatorsCollectionFailed, + CreatorsSaveFailed, + CreatorsRemoveFailed, + CreatorsOperationFailed, +) from .utils import ( get_last_versions_for_instances, @@ -17,8 +31,6 @@ ) from .creator_plugins import ( - CreatorError, - BaseCreator, Creator, AutoCreator, diff --git a/client/ayon_core/pipeline/create/context.py b/client/ayon_core/pipeline/create/context.py index 1c64d22733..0dd8ed1bd1 100644 --- a/client/ayon_core/pipeline/create/context.py +++ b/client/ayon_core/pipeline/create/context.py @@ -29,12 +29,23 @@ ) from ayon_core.pipeline.plugin_discover import DiscoverResult +from .exceptions import ( + CreatorError, + ImmutableKeyError, + CreatorsCreateFailed, + CreatorsCollectionFailed, + CreatorsSaveFailed, + CreatorsRemoveFailed, + ConvertorsFindFailed, + ConvertorsConversionFailed, + UnavailableSharedData, + HostMissRequiredMethod, +) from .creator_plugins import ( Creator, AutoCreator, discover_creator_plugins, discover_convertor_plugins, - CreatorError, ) # Changes of instances and context are send as tuple of 2 information @@ -42,68 +53,6 @@ _NOT_SET = object() -class UnavailableSharedData(Exception): - """Shared data are not available at the moment when are accessed.""" - pass - - -class ImmutableKeyError(TypeError): - """Accessed key is immutable so does not allow changes or removals.""" - - def __init__(self, key, msg=None): - self.immutable_key = key - if not msg: - msg = "Key \"{}\" is immutable and does not allow changes.".format( - key - ) - super(ImmutableKeyError, self).__init__(msg) - - -class HostMissRequiredMethod(Exception): - """Host does not have implemented required functions for creation.""" - - def __init__(self, host, missing_methods): - self.missing_methods = missing_methods - self.host = host - joined_methods = ", ".join( - ['"{}"'.format(name) for name in missing_methods] - ) - dirpath = os.path.dirname( - os.path.normpath(inspect.getsourcefile(host)) - ) - dirpath_parts = dirpath.split(os.path.sep) - host_name = dirpath_parts.pop(-1) - if host_name == "api": - host_name = dirpath_parts.pop(-1) - - msg = "Host \"{}\" does not have implemented method/s {}".format( - host_name, joined_methods - ) - super(HostMissRequiredMethod, self).__init__(msg) - - -class ConvertorsOperationFailed(Exception): - def __init__(self, msg, failed_info): - super(ConvertorsOperationFailed, self).__init__(msg) - self.failed_info = failed_info - - -class 
ConvertorsFindFailed(ConvertorsOperationFailed): - def __init__(self, failed_info): - msg = "Failed to find incompatible products" - super(ConvertorsFindFailed, self).__init__( - msg, failed_info - ) - - -class ConvertorsConversionFailed(ConvertorsOperationFailed): - def __init__(self, failed_info): - msg = "Failed to convert incompatible products" - super(ConvertorsConversionFailed, self).__init__( - msg, failed_info - ) - - def prepare_failed_convertor_operation_info(identifier, exc_info): exc_type, exc_value, exc_traceback = exc_info formatted_traceback = "".join(traceback.format_exception( @@ -117,59 +66,6 @@ def prepare_failed_convertor_operation_info(identifier, exc_info): } -class CreatorsOperationFailed(Exception): - """Raised when a creator process crashes in 'CreateContext'. - - The exception contains information about the creator and error. The data - are prepared using 'prepare_failed_creator_operation_info' and can be - serialized using json. - - Usage is for UI purposes which may not have access to exceptions directly - and would not have ability to catch exceptions 'per creator'. - - Args: - msg (str): General error message. - failed_info (list[dict[str, Any]]): List of failed creators with - exception message and optionally formatted traceback. - """ - - def __init__(self, msg, failed_info): - super(CreatorsOperationFailed, self).__init__(msg) - self.failed_info = failed_info - - -class CreatorsCollectionFailed(CreatorsOperationFailed): - def __init__(self, failed_info): - msg = "Failed to collect instances" - super(CreatorsCollectionFailed, self).__init__( - msg, failed_info - ) - - -class CreatorsSaveFailed(CreatorsOperationFailed): - def __init__(self, failed_info): - msg = "Failed update instance changes" - super(CreatorsSaveFailed, self).__init__( - msg, failed_info - ) - - -class CreatorsRemoveFailed(CreatorsOperationFailed): - def __init__(self, failed_info): - msg = "Failed to remove instances" - super(CreatorsRemoveFailed, self).__init__( - msg, failed_info - ) - - -class CreatorsCreateFailed(CreatorsOperationFailed): - def __init__(self, failed_info): - msg = "Failed to create instances" - super(CreatorsCreateFailed, self).__init__( - msg, failed_info - ) - - def prepare_failed_creator_operation_info( identifier, label, exc_info, add_traceback=True ): diff --git a/client/ayon_core/pipeline/create/creator_plugins.py b/client/ayon_core/pipeline/create/creator_plugins.py index 624f1c9588..1e09eb62a1 100644 --- a/client/ayon_core/pipeline/create/creator_plugins.py +++ b/client/ayon_core/pipeline/create/creator_plugins.py @@ -26,16 +26,6 @@ from .context import CreateContext, CreatedInstance, UpdateData # noqa: F401 -class CreatorError(Exception): - """Should be raised when creator failed because of known issue. - - Message of error should be user readable. - """ - - def __init__(self, message): - super(CreatorError, self).__init__(message) - - class ProductConvertorPlugin(ABC): """Helper for conversion of instances created using legacy creators. 
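Illustrative sketch only, not part of the series: the re-exports added to create/__init__.py above keep the existing public import path working once the new exceptions module (added below) is in place. The failure message used here is hypothetical.

```
from ayon_core.pipeline.create import CreatorError
from ayon_core.pipeline.create.exceptions import CreatorError as _CreatorError

# Both import paths resolve to the same class after the move, so existing
# 'except CreatorError' blocks keep working unchanged.
assert CreatorError is _CreatorError

try:
    # Hypothetical failure raised from a creator plugin; the message is
    # meant to be readable by artists.
    raise CreatorError("Render camera is not selected.")
except CreatorError as exc:
    print(exc)
```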
diff --git a/client/ayon_core/pipeline/create/exceptions.py b/client/ayon_core/pipeline/create/exceptions.py new file mode 100644 index 0000000000..24264840cb --- /dev/null +++ b/client/ayon_core/pipeline/create/exceptions.py @@ -0,0 +1,114 @@ +import os +import inspect + + +class UnavailableSharedData(Exception): + """Shared data are not available at the moment when are accessed.""" + pass + + +class ImmutableKeyError(TypeError): + """Accessed key is immutable so does not allow changes or removals.""" + + def __init__(self, key, msg=None): + self.immutable_key = key + if not msg: + msg = "Key \"{}\" is immutable and does not allow changes.".format( + key + ) + super().__init__(msg) + + +class HostMissRequiredMethod(Exception): + """Host does not have implemented required functions for creation.""" + + def __init__(self, host, missing_methods): + self.missing_methods = missing_methods + self.host = host + joined_methods = ", ".join( + ['"{}"'.format(name) for name in missing_methods] + ) + dirpath = os.path.dirname( + os.path.normpath(inspect.getsourcefile(host)) + ) + dirpath_parts = dirpath.split(os.path.sep) + host_name = dirpath_parts.pop(-1) + if host_name == "api": + host_name = dirpath_parts.pop(-1) + + msg = "Host \"{}\" does not have implemented method/s {}".format( + host_name, joined_methods + ) + super().__init__(msg) + + +class ConvertorsOperationFailed(Exception): + def __init__(self, msg, failed_info): + super().__init__(msg) + self.failed_info = failed_info + + +class ConvertorsFindFailed(ConvertorsOperationFailed): + def __init__(self, failed_info): + msg = "Failed to find incompatible products" + super().__init__(msg, failed_info) + + +class ConvertorsConversionFailed(ConvertorsOperationFailed): + def __init__(self, failed_info): + msg = "Failed to convert incompatible products" + super().__init__(msg, failed_info) + + +class CreatorError(Exception): + """Should be raised when creator failed because of known issue. + + Message of error should be artist friendly. + """ + pass + + +class CreatorsOperationFailed(Exception): + """Raised when a creator process crashes in 'CreateContext'. + + The exception contains information about the creator and error. The data + are prepared using 'prepare_failed_creator_operation_info' and can be + serialized using json. + + Usage is for UI purposes which may not have access to exceptions directly + and would not have ability to catch exceptions 'per creator'. + + Args: + msg (str): General error message. + failed_info (list[dict[str, Any]]): List of failed creators with + exception message and optionally formatted traceback. 
+ """ + + def __init__(self, msg, failed_info): + super().__init__(msg) + self.failed_info = failed_info + + +class CreatorsCollectionFailed(CreatorsOperationFailed): + def __init__(self, failed_info): + msg = "Failed to collect instances" + super().__init__(msg, failed_info) + + +class CreatorsSaveFailed(CreatorsOperationFailed): + def __init__(self, failed_info): + msg = "Failed update instance changes" + super().__init__(msg, failed_info) + + +class CreatorsRemoveFailed(CreatorsOperationFailed): + def __init__(self, failed_info): + msg = "Failed to remove instances" + super().__init__(msg, failed_info) + + +class CreatorsCreateFailed(CreatorsOperationFailed): + def __init__(self, failed_info): + msg = "Failed to create instances" + super().__init__(msg, failed_info) + From 558cc13cdc10942334d5815829792eb078d67c27 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Mon, 12 Aug 2024 12:10:15 +0200 Subject: [PATCH 02/16] move 'TrackChangesItem' to separate file --- client/ayon_core/pipeline/create/changes.py | 313 +++++++++++++++++++ client/ayon_core/pipeline/create/context.py | 314 +------------------- 2 files changed, 314 insertions(+), 313 deletions(-) create mode 100644 client/ayon_core/pipeline/create/changes.py diff --git a/client/ayon_core/pipeline/create/changes.py b/client/ayon_core/pipeline/create/changes.py new file mode 100644 index 0000000000..217478ee30 --- /dev/null +++ b/client/ayon_core/pipeline/create/changes.py @@ -0,0 +1,313 @@ +import copy + +_EMPTY_VALUE = object() + + +class TrackChangesItem(object): + """Helper object to track changes in data. + + Has access to full old and new data and will create deep copy of them, + so it is not needed to create copy before passed in. + + Can work as a dictionary if old or new value is a dictionary. In + that case received object is another object of 'TrackChangesItem'. + + Goal is to be able to get old or new value as was or only changed values + or get information about removed/changed keys, and all of that on + any "dictionary level". + + ``` + # Example of possible usages + >>> old_value = { + ... "key_1": "value_1", + ... "key_2": { + ... "key_sub_1": 1, + ... "key_sub_2": { + ... "enabled": True + ... } + ... }, + ... "key_3": "value_2" + ... } + >>> new_value = { + ... "key_1": "value_1", + ... "key_2": { + ... "key_sub_2": { + ... "enabled": False + ... }, + ... "key_sub_3": 3 + ... }, + ... "key_3": "value_3" + ... } + + >>> changes = TrackChangesItem(old_value, new_value) + >>> changes.changed + True + + >>> changes["key_2"]["key_sub_1"].new_value is None + True + + >>> list(sorted(changes.changed_keys)) + ['key_2', 'key_3'] + + >>> changes["key_2"]["key_sub_2"]["enabled"].changed + True + + >>> changes["key_2"].removed_keys + {'key_sub_1'} + + >>> list(sorted(changes["key_2"].available_keys)) + ['key_sub_1', 'key_sub_2', 'key_sub_3'] + + >>> changes.new_value == new_value + True + + # Get only changed values + only_changed_new_values = { + key: changes[key].new_value + for key in changes.changed_keys + } + ``` + + Args: + old_value (Any): Old value. + new_value (Any): New value. 
+ """ + + def __init__(self, old_value, new_value): + self._changed = old_value != new_value + # Resolve if value is '_EMPTY_VALUE' after comparison of the values + if old_value is _EMPTY_VALUE: + old_value = None + if new_value is _EMPTY_VALUE: + new_value = None + self._old_value = copy.deepcopy(old_value) + self._new_value = copy.deepcopy(new_value) + + self._old_is_dict = isinstance(old_value, dict) + self._new_is_dict = isinstance(new_value, dict) + + self._old_keys = None + self._new_keys = None + self._available_keys = None + self._removed_keys = None + + self._changed_keys = None + + self._sub_items = None + + def __getitem__(self, key): + """Getter looks into subitems if object is dictionary.""" + + if self._sub_items is None: + self._prepare_sub_items() + return self._sub_items[key] + + def __bool__(self): + """Boolean of object is if old and new value are the same.""" + + return self._changed + + def get(self, key, default=None): + """Try to get sub item.""" + + if self._sub_items is None: + self._prepare_sub_items() + return self._sub_items.get(key, default) + + @property + def old_value(self): + """Get copy of old value. + + Returns: + Any: Whatever old value was. + """ + + return copy.deepcopy(self._old_value) + + @property + def new_value(self): + """Get copy of new value. + + Returns: + Any: Whatever new value was. + """ + + return copy.deepcopy(self._new_value) + + @property + def changed(self): + """Value changed. + + Returns: + bool: If data changed. + """ + + return self._changed + + @property + def is_dict(self): + """Object can be used as dictionary. + + Returns: + bool: When can be used that way. + """ + + return self._old_is_dict or self._new_is_dict + + @property + def changes(self): + """Get changes in raw data. + + This method should be used only if 'is_dict' value is 'True'. + + Returns: + Dict[str, Tuple[Any, Any]]: Changes are by key in tuple + (, ). If 'is_dict' is 'False' then + output is always empty dictionary. + """ + + output = {} + if not self.is_dict: + return output + + old_value = self.old_value + new_value = self.new_value + for key in self.changed_keys: + _old = None + _new = None + if self._old_is_dict: + _old = old_value.get(key) + if self._new_is_dict: + _new = new_value.get(key) + output[key] = (_old, _new) + return output + + # Methods/properties that can be used when 'is_dict' is 'True' + @property + def old_keys(self): + """Keys from old value. + + Empty set is returned if old value is not a dict. + + Returns: + Set[str]: Keys from old value. + """ + + if self._old_keys is None: + self._prepare_keys() + return set(self._old_keys) + + @property + def new_keys(self): + """Keys from new value. + + Empty set is returned if old value is not a dict. + + Returns: + Set[str]: Keys from new value. + """ + + if self._new_keys is None: + self._prepare_keys() + return set(self._new_keys) + + @property + def changed_keys(self): + """Keys that has changed from old to new value. + + Empty set is returned if both old and new value are not a dict. + + Returns: + Set[str]: Keys of changed keys. + """ + + if self._changed_keys is None: + self._prepare_sub_items() + return set(self._changed_keys) + + @property + def available_keys(self): + """All keys that are available in old and new value. + + Empty set is returned if both old and new value are not a dict. + Output is Union of 'old_keys' and 'new_keys'. + + Returns: + Set[str]: All keys from old and new value. 
+ """ + + if self._available_keys is None: + self._prepare_keys() + return set(self._available_keys) + + @property + def removed_keys(self): + """Key that are not available in new value but were in old value. + + Returns: + Set[str]: All removed keys. + """ + + if self._removed_keys is None: + self._prepare_sub_items() + return set(self._removed_keys) + + def _prepare_keys(self): + old_keys = set() + new_keys = set() + if self._old_is_dict and self._new_is_dict: + old_keys = set(self._old_value.keys()) + new_keys = set(self._new_value.keys()) + + elif self._old_is_dict: + old_keys = set(self._old_value.keys()) + + elif self._new_is_dict: + new_keys = set(self._new_value.keys()) + + self._old_keys = old_keys + self._new_keys = new_keys + self._available_keys = old_keys | new_keys + self._removed_keys = old_keys - new_keys + + def _prepare_sub_items(self): + sub_items = {} + changed_keys = set() + + old_keys = self.old_keys + new_keys = self.new_keys + new_value = self.new_value + old_value = self.old_value + if self._old_is_dict and self._new_is_dict: + for key in self.available_keys: + item = TrackChangesItem( + old_value.get(key), new_value.get(key) + ) + sub_items[key] = item + if item.changed or key not in old_keys or key not in new_keys: + changed_keys.add(key) + + elif self._old_is_dict: + old_keys = set(old_value.keys()) + available_keys = set(old_keys) + changed_keys = set(available_keys) + for key in available_keys: + # NOTE Use '_EMPTY_VALUE' because old value could be 'None' + # which would result in "unchanged" item + sub_items[key] = TrackChangesItem( + old_value.get(key), _EMPTY_VALUE + ) + + elif self._new_is_dict: + new_keys = set(new_value.keys()) + available_keys = set(new_keys) + changed_keys = set(available_keys) + for key in available_keys: + # NOTE Use '_EMPTY_VALUE' because new value could be 'None' + # which would result in "unchanged" item + sub_items[key] = TrackChangesItem( + _EMPTY_VALUE, new_value.get(key) + ) + + self._sub_items = sub_items + self._changed_keys = changed_keys diff --git a/client/ayon_core/pipeline/create/context.py b/client/ayon_core/pipeline/create/context.py index 0dd8ed1bd1..6f802a5a6e 100644 --- a/client/ayon_core/pipeline/create/context.py +++ b/client/ayon_core/pipeline/create/context.py @@ -41,6 +41,7 @@ UnavailableSharedData, HostMissRequiredMethod, ) +from .changes import TrackChangesItem from .creator_plugins import ( Creator, AutoCreator, @@ -84,319 +85,6 @@ def prepare_failed_creator_operation_info( } -_EMPTY_VALUE = object() - - -class TrackChangesItem(object): - """Helper object to track changes in data. - - Has access to full old and new data and will create deep copy of them, - so it is not needed to create copy before passed in. - - Can work as a dictionary if old or new value is a dictionary. In - that case received object is another object of 'TrackChangesItem'. - - Goal is to be able to get old or new value as was or only changed values - or get information about removed/changed keys, and all of that on - any "dictionary level". - - ``` - # Example of possible usages - >>> old_value = { - ... "key_1": "value_1", - ... "key_2": { - ... "key_sub_1": 1, - ... "key_sub_2": { - ... "enabled": True - ... } - ... }, - ... "key_3": "value_2" - ... } - >>> new_value = { - ... "key_1": "value_1", - ... "key_2": { - ... "key_sub_2": { - ... "enabled": False - ... }, - ... "key_sub_3": 3 - ... }, - ... "key_3": "value_3" - ... 
} - - >>> changes = TrackChangesItem(old_value, new_value) - >>> changes.changed - True - - >>> changes["key_2"]["key_sub_1"].new_value is None - True - - >>> list(sorted(changes.changed_keys)) - ['key_2', 'key_3'] - - >>> changes["key_2"]["key_sub_2"]["enabled"].changed - True - - >>> changes["key_2"].removed_keys - {'key_sub_1'} - - >>> list(sorted(changes["key_2"].available_keys)) - ['key_sub_1', 'key_sub_2', 'key_sub_3'] - - >>> changes.new_value == new_value - True - - # Get only changed values - only_changed_new_values = { - key: changes[key].new_value - for key in changes.changed_keys - } - ``` - - Args: - old_value (Any): Old value. - new_value (Any): New value. - """ - - def __init__(self, old_value, new_value): - self._changed = old_value != new_value - # Resolve if value is '_EMPTY_VALUE' after comparison of the values - if old_value is _EMPTY_VALUE: - old_value = None - if new_value is _EMPTY_VALUE: - new_value = None - self._old_value = copy.deepcopy(old_value) - self._new_value = copy.deepcopy(new_value) - - self._old_is_dict = isinstance(old_value, dict) - self._new_is_dict = isinstance(new_value, dict) - - self._old_keys = None - self._new_keys = None - self._available_keys = None - self._removed_keys = None - - self._changed_keys = None - - self._sub_items = None - - def __getitem__(self, key): - """Getter looks into subitems if object is dictionary.""" - - if self._sub_items is None: - self._prepare_sub_items() - return self._sub_items[key] - - def __bool__(self): - """Boolean of object is if old and new value are the same.""" - - return self._changed - - def get(self, key, default=None): - """Try to get sub item.""" - - if self._sub_items is None: - self._prepare_sub_items() - return self._sub_items.get(key, default) - - @property - def old_value(self): - """Get copy of old value. - - Returns: - Any: Whatever old value was. - """ - - return copy.deepcopy(self._old_value) - - @property - def new_value(self): - """Get copy of new value. - - Returns: - Any: Whatever new value was. - """ - - return copy.deepcopy(self._new_value) - - @property - def changed(self): - """Value changed. - - Returns: - bool: If data changed. - """ - - return self._changed - - @property - def is_dict(self): - """Object can be used as dictionary. - - Returns: - bool: When can be used that way. - """ - - return self._old_is_dict or self._new_is_dict - - @property - def changes(self): - """Get changes in raw data. - - This method should be used only if 'is_dict' value is 'True'. - - Returns: - Dict[str, Tuple[Any, Any]]: Changes are by key in tuple - (, ). If 'is_dict' is 'False' then - output is always empty dictionary. - """ - - output = {} - if not self.is_dict: - return output - - old_value = self.old_value - new_value = self.new_value - for key in self.changed_keys: - _old = None - _new = None - if self._old_is_dict: - _old = old_value.get(key) - if self._new_is_dict: - _new = new_value.get(key) - output[key] = (_old, _new) - return output - - # Methods/properties that can be used when 'is_dict' is 'True' - @property - def old_keys(self): - """Keys from old value. - - Empty set is returned if old value is not a dict. - - Returns: - Set[str]: Keys from old value. - """ - - if self._old_keys is None: - self._prepare_keys() - return set(self._old_keys) - - @property - def new_keys(self): - """Keys from new value. - - Empty set is returned if old value is not a dict. - - Returns: - Set[str]: Keys from new value. 
- """ - - if self._new_keys is None: - self._prepare_keys() - return set(self._new_keys) - - @property - def changed_keys(self): - """Keys that has changed from old to new value. - - Empty set is returned if both old and new value are not a dict. - - Returns: - Set[str]: Keys of changed keys. - """ - - if self._changed_keys is None: - self._prepare_sub_items() - return set(self._changed_keys) - - @property - def available_keys(self): - """All keys that are available in old and new value. - - Empty set is returned if both old and new value are not a dict. - Output is Union of 'old_keys' and 'new_keys'. - - Returns: - Set[str]: All keys from old and new value. - """ - - if self._available_keys is None: - self._prepare_keys() - return set(self._available_keys) - - @property - def removed_keys(self): - """Key that are not available in new value but were in old value. - - Returns: - Set[str]: All removed keys. - """ - - if self._removed_keys is None: - self._prepare_sub_items() - return set(self._removed_keys) - - def _prepare_keys(self): - old_keys = set() - new_keys = set() - if self._old_is_dict and self._new_is_dict: - old_keys = set(self._old_value.keys()) - new_keys = set(self._new_value.keys()) - - elif self._old_is_dict: - old_keys = set(self._old_value.keys()) - - elif self._new_is_dict: - new_keys = set(self._new_value.keys()) - - self._old_keys = old_keys - self._new_keys = new_keys - self._available_keys = old_keys | new_keys - self._removed_keys = old_keys - new_keys - - def _prepare_sub_items(self): - sub_items = {} - changed_keys = set() - - old_keys = self.old_keys - new_keys = self.new_keys - new_value = self.new_value - old_value = self.old_value - if self._old_is_dict and self._new_is_dict: - for key in self.available_keys: - item = TrackChangesItem( - old_value.get(key), new_value.get(key) - ) - sub_items[key] = item - if item.changed or key not in old_keys or key not in new_keys: - changed_keys.add(key) - - elif self._old_is_dict: - old_keys = set(old_value.keys()) - available_keys = set(old_keys) - changed_keys = set(available_keys) - for key in available_keys: - # NOTE Use '_EMPTY_VALUE' because old value could be 'None' - # which would result in "unchanged" item - sub_items[key] = TrackChangesItem( - old_value.get(key), _EMPTY_VALUE - ) - - elif self._new_is_dict: - new_keys = set(new_value.keys()) - available_keys = set(new_keys) - changed_keys = set(available_keys) - for key in available_keys: - # NOTE Use '_EMPTY_VALUE' because new value could be 'None' - # which would result in "unchanged" item - sub_items[key] = TrackChangesItem( - _EMPTY_VALUE, new_value.get(key) - ) - - self._sub_items = sub_items - self._changed_keys = changed_keys - - class InstanceMember: """Representation of instance member. 
From f41a830f437d000c0f83f44765c6a5e00b25a137 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Mon, 12 Aug 2024 12:17:27 +0200 Subject: [PATCH 03/16] move structure classes to separate file --- client/ayon_core/pipeline/create/__init__.py | 23 +- client/ayon_core/pipeline/create/context.py | 866 +---------------- .../ayon_core/pipeline/create/structures.py | 870 ++++++++++++++++++ 3 files changed, 889 insertions(+), 870 deletions(-) create mode 100644 client/ayon_core/pipeline/create/structures.py diff --git a/client/ayon_core/pipeline/create/__init__.py b/client/ayon_core/pipeline/create/__init__.py index 68e173d6b9..bb05bc6a09 100644 --- a/client/ayon_core/pipeline/create/__init__.py +++ b/client/ayon_core/pipeline/create/__init__.py @@ -18,7 +18,7 @@ CreatorsRemoveFailed, CreatorsOperationFailed, ) - +from .structures import CreatedInstance from .utils import ( get_last_versions_for_instances, get_next_versions_for_instances, @@ -48,10 +48,7 @@ cache_and_get_instances, ) -from .context import ( - CreatedInstance, - CreateContext -) +from .context import CreateContext from .legacy_create import ( LegacyCreator, @@ -65,6 +62,21 @@ "PRE_CREATE_THUMBNAIL_KEY", "DEFAULT_VARIANT_VALUE", + "UnavailableSharedData", + "ImmutableKeyError", + "HostMissRequiredMethod", + "ConvertorsOperationFailed", + "ConvertorsFindFailed", + "ConvertorsConversionFailed", + "CreatorError", + "CreatorsCreateFailed", + "CreatorsCollectionFailed", + "CreatorsSaveFailed", + "CreatorsRemoveFailed", + "CreatorsOperationFailed", + + "CreatedInstance", + "get_last_versions_for_instances", "get_next_versions_for_instances", @@ -90,7 +102,6 @@ "cache_and_get_instances", - "CreatedInstance", "CreateContext", "LegacyCreator", diff --git a/client/ayon_core/pipeline/create/context.py b/client/ayon_core/pipeline/create/context.py index 6f802a5a6e..a11bc311dc 100644 --- a/client/ayon_core/pipeline/create/context.py +++ b/client/ayon_core/pipeline/create/context.py @@ -5,7 +5,6 @@ import traceback import collections import inspect -from uuid import uuid4 from contextlib import contextmanager from typing import Optional @@ -16,22 +15,14 @@ from ayon_core.settings import get_project_settings from ayon_core.lib import is_func_signature_supported from ayon_core.lib.attribute_definitions import ( - UnknownDef, - serialize_attr_defs, - deserialize_attr_defs, get_default_values, ) from ayon_core.host import IPublishHost, IWorkfileHost -from ayon_core.pipeline import ( - Anatomy, - AYON_INSTANCE_ID, - AVALON_INSTANCE_ID, -) +from ayon_core.pipeline import Anatomy from ayon_core.pipeline.plugin_discover import DiscoverResult from .exceptions import ( CreatorError, - ImmutableKeyError, CreatorsCreateFailed, CreatorsCollectionFailed, CreatorsSaveFailed, @@ -42,6 +33,7 @@ HostMissRequiredMethod, ) from .changes import TrackChangesItem +from .structures import PublishAttributes, ConvertorItem from .creator_plugins import ( Creator, AutoCreator, @@ -85,860 +77,6 @@ def prepare_failed_creator_operation_info( } -class InstanceMember: - """Representation of instance member. - - TODO: - Implement and use! - """ - - def __init__(self, instance, name): - self.instance = instance - - instance.add_members(self) - - self.name = name - self._actions = [] - - def add_action(self, label, callback): - self._actions.append({ - "label": label, - "callback": callback - }) - - -class AttributeValues(object): - """Container which keep values of Attribute definitions. 
- - Goal is to have one object which hold values of attribute definitions for - single instance. - - Has dictionary like methods. Not all of them are allowed all the time. - - Args: - attr_defs(AbstractAttrDef): Definitions of value type and properties. - values(dict): Values after possible conversion. - origin_data(dict): Values loaded from host before conversion. - """ - - def __init__(self, attr_defs, values, origin_data=None): - if origin_data is None: - origin_data = copy.deepcopy(values) - self._origin_data = origin_data - - attr_defs_by_key = { - attr_def.key: attr_def - for attr_def in attr_defs - if attr_def.is_value_def - } - for key, value in values.items(): - if key not in attr_defs_by_key: - new_def = UnknownDef(key, label=key, default=value) - attr_defs.append(new_def) - attr_defs_by_key[key] = new_def - - self._attr_defs = attr_defs - self._attr_defs_by_key = attr_defs_by_key - - self._data = {} - for attr_def in attr_defs: - value = values.get(attr_def.key) - if value is not None: - self._data[attr_def.key] = value - - def __setitem__(self, key, value): - if key not in self._attr_defs_by_key: - raise KeyError("Key \"{}\" was not found.".format(key)) - - old_value = self._data.get(key) - if old_value == value: - return - self._data[key] = value - - def __getitem__(self, key): - if key not in self._attr_defs_by_key: - return self._data[key] - return self._data.get(key, self._attr_defs_by_key[key].default) - - def __contains__(self, key): - return key in self._attr_defs_by_key - - def get(self, key, default=None): - if key in self._attr_defs_by_key: - return self[key] - return default - - def keys(self): - return self._attr_defs_by_key.keys() - - def values(self): - for key in self._attr_defs_by_key.keys(): - yield self._data.get(key) - - def items(self): - for key in self._attr_defs_by_key.keys(): - yield key, self._data.get(key) - - def update(self, value): - for _key, _value in dict(value): - self[_key] = _value - - def pop(self, key, default=None): - value = self._data.pop(key, default) - # Remove attribute definition if is 'UnknownDef' - # - gives option to get rid of unknown values - attr_def = self._attr_defs_by_key.get(key) - if isinstance(attr_def, UnknownDef): - self._attr_defs_by_key.pop(key) - self._attr_defs.remove(attr_def) - return value - - def reset_values(self): - self._data = {} - - def mark_as_stored(self): - self._origin_data = copy.deepcopy(self._data) - - @property - def attr_defs(self): - """Pointer to attribute definitions. - - Returns: - List[AbstractAttrDef]: Attribute definitions. - """ - - return list(self._attr_defs) - - @property - def origin_data(self): - return copy.deepcopy(self._origin_data) - - def data_to_store(self): - """Create new dictionary with data to store. - - Returns: - Dict[str, Any]: Attribute values that should be stored. - """ - - output = {} - for key in self._data: - output[key] = self[key] - - for key, attr_def in self._attr_defs_by_key.items(): - if key not in output: - output[key] = attr_def.default - return output - - def get_serialized_attr_defs(self): - """Serialize attribute definitions to json serializable types. - - Returns: - List[Dict[str, Any]]: Serialized attribute definitions. - """ - - return serialize_attr_defs(self._attr_defs) - - -class CreatorAttributeValues(AttributeValues): - """Creator specific attribute values of an instance. - - Args: - instance (CreatedInstance): Instance for which are values hold. 
- """ - - def __init__(self, instance, *args, **kwargs): - self.instance = instance - super(CreatorAttributeValues, self).__init__(*args, **kwargs) - - -class PublishAttributeValues(AttributeValues): - """Publish plugin specific attribute values. - - Values are for single plugin which can be on `CreatedInstance` - or context values stored on `CreateContext`. - - Args: - publish_attributes(PublishAttributes): Wrapper for multiple publish - attributes is used as parent object. - """ - - def __init__(self, publish_attributes, *args, **kwargs): - self.publish_attributes = publish_attributes - super(PublishAttributeValues, self).__init__(*args, **kwargs) - - @property - def parent(self): - return self.publish_attributes.parent - - -class PublishAttributes: - """Wrapper for publish plugin attribute definitions. - - Cares about handling attribute definitions of multiple publish plugins. - Keep information about attribute definitions and their values. - - Args: - parent(CreatedInstance, CreateContext): Parent for which will be - data stored and from which are data loaded. - origin_data(dict): Loaded data by plugin class name. - attr_plugins(Union[List[pyblish.api.Plugin], None]): List of publish - plugins that may have defined attribute definitions. - """ - - def __init__(self, parent, origin_data, attr_plugins=None): - self.parent = parent - self._origin_data = copy.deepcopy(origin_data) - - attr_plugins = attr_plugins or [] - self.attr_plugins = attr_plugins - - self._data = copy.deepcopy(origin_data) - self._plugin_names_order = [] - self._missing_plugins = [] - - self.set_publish_plugins(attr_plugins) - - def __getitem__(self, key): - return self._data[key] - - def __contains__(self, key): - return key in self._data - - def keys(self): - return self._data.keys() - - def values(self): - return self._data.values() - - def items(self): - return self._data.items() - - def pop(self, key, default=None): - """Remove or reset value for plugin. - - Plugin values are reset to defaults if plugin is available but - data of plugin which was not found are removed. - - Args: - key(str): Plugin name. - default: Default value if plugin was not found. 
- """ - - if key not in self._data: - return default - - if key in self._missing_plugins: - self._missing_plugins.remove(key) - removed_item = self._data.pop(key) - return removed_item.data_to_store() - - value_item = self._data[key] - # Prepare value to return - output = value_item.data_to_store() - # Reset values - value_item.reset_values() - return output - - def plugin_names_order(self): - """Plugin names order by their 'order' attribute.""" - - for name in self._plugin_names_order: - yield name - - def mark_as_stored(self): - self._origin_data = copy.deepcopy(self.data_to_store()) - - def data_to_store(self): - """Convert attribute values to "data to store".""" - - output = {} - for key, attr_value in self._data.items(): - output[key] = attr_value.data_to_store() - return output - - @property - def origin_data(self): - return copy.deepcopy(self._origin_data) - - def set_publish_plugins(self, attr_plugins): - """Set publish plugins attribute definitions.""" - - self._plugin_names_order = [] - self._missing_plugins = [] - self.attr_plugins = attr_plugins or [] - - origin_data = self._origin_data - data = self._data - self._data = {} - added_keys = set() - for plugin in attr_plugins: - output = plugin.convert_attribute_values(data) - if output is not None: - data = output - attr_defs = plugin.get_attribute_defs() - if not attr_defs: - continue - - key = plugin.__name__ - added_keys.add(key) - self._plugin_names_order.append(key) - - value = data.get(key) or {} - orig_value = copy.deepcopy(origin_data.get(key) or {}) - self._data[key] = PublishAttributeValues( - self, attr_defs, value, orig_value - ) - - for key, value in data.items(): - if key not in added_keys: - self._missing_plugins.append(key) - self._data[key] = PublishAttributeValues( - self, [], value, value - ) - - def serialize_attributes(self): - return { - "attr_defs": { - plugin_name: attrs_value.get_serialized_attr_defs() - for plugin_name, attrs_value in self._data.items() - }, - "plugin_names_order": self._plugin_names_order, - "missing_plugins": self._missing_plugins - } - - def deserialize_attributes(self, data): - self._plugin_names_order = data["plugin_names_order"] - self._missing_plugins = data["missing_plugins"] - - attr_defs = deserialize_attr_defs(data["attr_defs"]) - - origin_data = self._origin_data - data = self._data - self._data = {} - - added_keys = set() - for plugin_name, attr_defs_data in attr_defs.items(): - attr_defs = deserialize_attr_defs(attr_defs_data) - value = data.get(plugin_name) or {} - orig_value = copy.deepcopy(origin_data.get(plugin_name) or {}) - self._data[plugin_name] = PublishAttributeValues( - self, attr_defs, value, orig_value - ) - - for key, value in data.items(): - if key not in added_keys: - self._missing_plugins.append(key) - self._data[key] = PublishAttributeValues( - self, [], value, value - ) - - -class CreatedInstance: - """Instance entity with data that will be stored to workfile. - - I think `data` must be required argument containing all minimum information - about instance like "folderPath" and "task" and all data used for filling - product name as creators may have custom data for product name filling. - - Notes: - Object have 2 possible initialization. One using 'creator' object which - is recommended for api usage. Second by passing information about - creator. - - Args: - product_type (str): Product type that will be created. - product_name (str): Name of product that will be created. 
- data (Dict[str, Any]): Data used for filling product name or override - data from already existing instance. - creator (Union[BaseCreator, None]): Creator responsible for instance. - creator_identifier (str): Identifier of creator plugin. - creator_label (str): Creator plugin label. - group_label (str): Default group label from creator plugin. - creator_attr_defs (List[AbstractAttrDef]): Attribute definitions from - creator. - """ - - # Keys that can't be changed or removed from data after loading using - # creator. - # - 'creator_attributes' and 'publish_attributes' can change values of - # their individual children but not on their own - __immutable_keys = ( - "id", - "instance_id", - "product_type", - "creator_identifier", - "creator_attributes", - "publish_attributes" - ) - - def __init__( - self, - product_type, - product_name, - data, - creator=None, - creator_identifier=None, - creator_label=None, - group_label=None, - creator_attr_defs=None, - ): - if creator is not None: - creator_identifier = creator.identifier - group_label = creator.get_group_label() - creator_label = creator.label - creator_attr_defs = creator.get_instance_attr_defs() - - self._creator_label = creator_label - self._group_label = group_label or creator_identifier - - # Instance members may have actions on them - # TODO implement members logic - self._members = [] - - # Data that can be used for lifetime of object - self._transient_data = {} - - # Create a copy of passed data to avoid changing them on the fly - data = copy.deepcopy(data or {}) - - # Pop dictionary values that will be converted to objects to be able - # catch changes - orig_creator_attributes = data.pop("creator_attributes", None) or {} - orig_publish_attributes = data.pop("publish_attributes", None) or {} - - # Store original value of passed data - self._orig_data = copy.deepcopy(data) - - # Pop 'productType' and 'productName' to prevent unexpected changes - data.pop("productType", None) - data.pop("productName", None) - # Backwards compatibility with OpenPype instances - data.pop("family", None) - data.pop("subset", None) - - asset_name = data.pop("asset", None) - if "folderPath" not in data: - data["folderPath"] = asset_name - - # QUESTION Does it make sense to have data stored as ordered dict? - self._data = collections.OrderedDict() - # QUESTION Do we need this "id" information on instance? 
- item_id = data.get("id") - # TODO use only 'AYON_INSTANCE_ID' when all hosts support it - if item_id not in {AYON_INSTANCE_ID, AVALON_INSTANCE_ID}: - item_id = AVALON_INSTANCE_ID - self._data["id"] = item_id - self._data["productType"] = product_type - self._data["productName"] = product_name - self._data["active"] = data.get("active", True) - self._data["creator_identifier"] = creator_identifier - - # Pop from source data all keys that are defined in `_data` before - # this moment and through their values away - # - they should be the same and if are not then should not change - # already set values - for key in self._data.keys(): - if key in data: - data.pop(key) - - self._data["variant"] = self._data.get("variant") or "" - # Stored creator specific attribute values - # {key: value} - creator_values = copy.deepcopy(orig_creator_attributes) - - self._data["creator_attributes"] = CreatorAttributeValues( - self, - list(creator_attr_defs), - creator_values, - orig_creator_attributes - ) - - # Stored publish specific attribute values - # {: {key: value}} - # - must be set using 'set_publish_plugins' - self._data["publish_attributes"] = PublishAttributes( - self, orig_publish_attributes, None - ) - if data: - self._data.update(data) - - if not self._data.get("instance_id"): - self._data["instance_id"] = str(uuid4()) - - self._folder_is_valid = self.has_set_folder - self._task_is_valid = self.has_set_task - - def __str__(self): - return ( - " {data}" - ).format( - creator_identifier=self.creator_identifier, - product={"name": self.product_name, "type": self.product_type}, - data=str(self._data) - ) - - # --- Dictionary like methods --- - def __getitem__(self, key): - return self._data[key] - - def __contains__(self, key): - return key in self._data - - def __setitem__(self, key, value): - # Validate immutable keys - if key not in self.__immutable_keys: - self._data[key] = value - - elif value != self._data.get(key): - # Raise exception if key is immutable and value has changed - raise ImmutableKeyError(key) - - def get(self, key, default=None): - return self._data.get(key, default) - - def pop(self, key, *args, **kwargs): - # Raise exception if is trying to pop key which is immutable - if key in self.__immutable_keys: - raise ImmutableKeyError(key) - - self._data.pop(key, *args, **kwargs) - - def keys(self): - return self._data.keys() - - def values(self): - return self._data.values() - - def items(self): - return self._data.items() - # ------ - - @property - def product_type(self): - return self._data["productType"] - - @property - def product_name(self): - return self._data["productName"] - - @property - def label(self): - label = self._data.get("label") - if not label: - label = self.product_name - return label - - @property - def group_label(self): - label = self._data.get("group") - if label: - return label - return self._group_label - - @property - def origin_data(self): - output = copy.deepcopy(self._orig_data) - output["creator_attributes"] = self.creator_attributes.origin_data - output["publish_attributes"] = self.publish_attributes.origin_data - return output - - @property - def creator_identifier(self): - return self._data["creator_identifier"] - - @property - def creator_label(self): - return self._creator_label or self.creator_identifier - - @property - def id(self): - """Instance identifier. - - Returns: - str: UUID of instance. - """ - - return self._data["instance_id"] - - @property - def data(self): - """Legacy access to data. - - Access to data is needed to modify values. 
- - Returns: - CreatedInstance: Object can be used as dictionary but with - validations of immutable keys. - """ - - return self - - @property - def transient_data(self): - """Data stored for lifetime of instance object. - - These data are not stored to scene and will be lost on object - deletion. - - Can be used to store objects. In some host implementations is not - possible to reference to object in scene with some unique identifier - (e.g. node in Fusion.). In that case it is handy to store the object - here. Should be used that way only if instance data are stored on the - node itself. - - Returns: - Dict[str, Any]: Dictionary object where you can store data related - to instance for lifetime of instance object. - """ - - return self._transient_data - - def changes(self): - """Calculate and return changes.""" - - return TrackChangesItem(self.origin_data, self.data_to_store()) - - def mark_as_stored(self): - """Should be called when instance data are stored. - - Origin data are replaced by current data so changes are cleared. - """ - - orig_keys = set(self._orig_data.keys()) - for key, value in self._data.items(): - orig_keys.discard(key) - if key in ("creator_attributes", "publish_attributes"): - continue - self._orig_data[key] = copy.deepcopy(value) - - for key in orig_keys: - self._orig_data.pop(key) - - self.creator_attributes.mark_as_stored() - self.publish_attributes.mark_as_stored() - - @property - def creator_attributes(self): - return self._data["creator_attributes"] - - @property - def creator_attribute_defs(self): - """Attribute definitions defined by creator plugin. - - Returns: - List[AbstractAttrDef]: Attribute definitions. - """ - - return self.creator_attributes.attr_defs - - @property - def publish_attributes(self): - return self._data["publish_attributes"] - - def data_to_store(self): - """Collect data that contain json parsable types. - - It is possible to recreate the instance using these data. - - Todos: - We probably don't need OrderedDict. When data are loaded they - are not ordered anymore. - - Returns: - OrderedDict: Ordered dictionary with instance data. - """ - - output = collections.OrderedDict() - for key, value in self._data.items(): - if key in ("creator_attributes", "publish_attributes"): - continue - output[key] = value - - output["creator_attributes"] = self.creator_attributes.data_to_store() - output["publish_attributes"] = self.publish_attributes.data_to_store() - - return output - - @classmethod - def from_existing(cls, instance_data, creator): - """Convert instance data from workfile to CreatedInstance. - - Args: - instance_data (Dict[str, Any]): Data in a structure ready for - 'CreatedInstance' object. - creator (BaseCreator): Creator plugin which is creating the - instance of for which the instance belong. - """ - - instance_data = copy.deepcopy(instance_data) - - product_type = instance_data.get("productType") - if product_type is None: - product_type = instance_data.get("family") - if product_type is None: - product_type = creator.product_type - product_name = instance_data.get("productName") - if product_name is None: - product_name = instance_data.get("subset") - - return cls( - product_type, product_name, instance_data, creator - ) - - def set_publish_plugins(self, attr_plugins): - """Set publish plugins with attribute definitions. - - This method should be called only from 'CreateContext'. 
- - Args: - attr_plugins (List[pyblish.api.Plugin]): Pyblish plugins which - inherit from 'AYONPyblishPluginMixin' and may contain - attribute definitions. - """ - - self.publish_attributes.set_publish_plugins(attr_plugins) - - def add_members(self, members): - """Currently unused method.""" - - for member in members: - if member not in self._members: - self._members.append(member) - - def serialize_for_remote(self): - """Serialize object into data to be possible recreated object. - - Returns: - Dict[str, Any]: Serialized data. - """ - - creator_attr_defs = self.creator_attributes.get_serialized_attr_defs() - publish_attributes = self.publish_attributes.serialize_attributes() - return { - "data": self.data_to_store(), - "orig_data": self.origin_data, - "creator_attr_defs": creator_attr_defs, - "publish_attributes": publish_attributes, - "creator_label": self._creator_label, - "group_label": self._group_label, - } - - @classmethod - def deserialize_on_remote(cls, serialized_data): - """Convert instance data to CreatedInstance. - - This is fake instance in remote process e.g. in UI process. The creator - is not a full creator and should not be used for calling methods when - instance is created from this method (matters on implementation). - - Args: - serialized_data (Dict[str, Any]): Serialized data for remote - recreating. Should contain 'data' and 'orig_data'. - """ - - instance_data = copy.deepcopy(serialized_data["data"]) - creator_identifier = instance_data["creator_identifier"] - - product_type = instance_data["productType"] - product_name = instance_data.get("productName", None) - - creator_label = serialized_data["creator_label"] - group_label = serialized_data["group_label"] - creator_attr_defs = deserialize_attr_defs( - serialized_data["creator_attr_defs"] - ) - publish_attributes = serialized_data["publish_attributes"] - - obj = cls( - product_type, - product_name, - instance_data, - creator_identifier=creator_identifier, - creator_label=creator_label, - group_label=group_label, - creator_attr_defs=creator_attr_defs - ) - obj._orig_data = serialized_data["orig_data"] - obj.publish_attributes.deserialize_attributes(publish_attributes) - - return obj - - # Context validation related methods/properties - @property - def has_set_folder(self): - """Folder path is set in data.""" - - return "folderPath" in self._data - - @property - def has_set_task(self): - """Task name is set in data.""" - - return "task" in self._data - - @property - def has_valid_context(self): - """Context data are valid for publishing.""" - - return self.has_valid_folder and self.has_valid_task - - @property - def has_valid_folder(self): - """Folder set in context exists in project.""" - - if not self.has_set_folder: - return False - return self._folder_is_valid - - @property - def has_valid_task(self): - """Task set in context exists in project.""" - - if not self.has_set_task: - return False - return self._task_is_valid - - def set_folder_invalid(self, invalid): - # TODO replace with `set_folder_path` - self._folder_is_valid = not invalid - - def set_task_invalid(self, invalid): - # TODO replace with `set_task_name` - self._task_is_valid = not invalid - - -class ConvertorItem(object): - """Item representing convertor plugin. - - Args: - identifier (str): Identifier of convertor. - label (str): Label which will be shown in UI. 
- """ - - def __init__(self, identifier, label): - self._id = str(uuid4()) - self.identifier = identifier - self.label = label - - @property - def id(self): - return self._id - - def to_data(self): - return { - "id": self.id, - "identifier": self.identifier, - "label": self.label - } - - @classmethod - def from_data(cls, data): - obj = cls(data["identifier"], data["label"]) - obj._id = data["id"] - return obj - - class CreateContext: """Context of instance creation. diff --git a/client/ayon_core/pipeline/create/structures.py b/client/ayon_core/pipeline/create/structures.py new file mode 100644 index 0000000000..7fe854c4fc --- /dev/null +++ b/client/ayon_core/pipeline/create/structures.py @@ -0,0 +1,870 @@ +import copy +import collections +from uuid import uuid4 + +from ayon_core.lib.attribute_definitions import ( + UnknownDef, + serialize_attr_defs, + deserialize_attr_defs, +) +from ayon_core.pipeline import ( + AYON_INSTANCE_ID, + AVALON_INSTANCE_ID, +) + +from .exceptions import ImmutableKeyError +from .changes import TrackChangesItem + + +class ConvertorItem(object): + """Item representing convertor plugin. + + Args: + identifier (str): Identifier of convertor. + label (str): Label which will be shown in UI. + """ + + def __init__(self, identifier, label): + self._id = str(uuid4()) + self.identifier = identifier + self.label = label + + @property + def id(self): + return self._id + + def to_data(self): + return { + "id": self.id, + "identifier": self.identifier, + "label": self.label + } + + @classmethod + def from_data(cls, data): + obj = cls(data["identifier"], data["label"]) + obj._id = data["id"] + return obj + + +class InstanceMember: + """Representation of instance member. + + TODO: + Implement and use! + """ + + def __init__(self, instance, name): + self.instance = instance + + instance.add_members(self) + + self.name = name + self._actions = [] + + def add_action(self, label, callback): + self._actions.append({ + "label": label, + "callback": callback + }) + + +class AttributeValues(object): + """Container which keep values of Attribute definitions. + + Goal is to have one object which hold values of attribute definitions for + single instance. + + Has dictionary like methods. Not all of them are allowed all the time. + + Args: + attr_defs(AbstractAttrDef): Definitions of value type and properties. + values(dict): Values after possible conversion. + origin_data(dict): Values loaded from host before conversion. 
+ """ + + def __init__(self, attr_defs, values, origin_data=None): + if origin_data is None: + origin_data = copy.deepcopy(values) + self._origin_data = origin_data + + attr_defs_by_key = { + attr_def.key: attr_def + for attr_def in attr_defs + if attr_def.is_value_def + } + for key, value in values.items(): + if key not in attr_defs_by_key: + new_def = UnknownDef(key, label=key, default=value) + attr_defs.append(new_def) + attr_defs_by_key[key] = new_def + + self._attr_defs = attr_defs + self._attr_defs_by_key = attr_defs_by_key + + self._data = {} + for attr_def in attr_defs: + value = values.get(attr_def.key) + if value is not None: + self._data[attr_def.key] = value + + def __setitem__(self, key, value): + if key not in self._attr_defs_by_key: + raise KeyError("Key \"{}\" was not found.".format(key)) + + old_value = self._data.get(key) + if old_value == value: + return + self._data[key] = value + + def __getitem__(self, key): + if key not in self._attr_defs_by_key: + return self._data[key] + return self._data.get(key, self._attr_defs_by_key[key].default) + + def __contains__(self, key): + return key in self._attr_defs_by_key + + def get(self, key, default=None): + if key in self._attr_defs_by_key: + return self[key] + return default + + def keys(self): + return self._attr_defs_by_key.keys() + + def values(self): + for key in self._attr_defs_by_key.keys(): + yield self._data.get(key) + + def items(self): + for key in self._attr_defs_by_key.keys(): + yield key, self._data.get(key) + + def update(self, value): + for _key, _value in dict(value): + self[_key] = _value + + def pop(self, key, default=None): + value = self._data.pop(key, default) + # Remove attribute definition if is 'UnknownDef' + # - gives option to get rid of unknown values + attr_def = self._attr_defs_by_key.get(key) + if isinstance(attr_def, UnknownDef): + self._attr_defs_by_key.pop(key) + self._attr_defs.remove(attr_def) + return value + + def reset_values(self): + self._data = {} + + def mark_as_stored(self): + self._origin_data = copy.deepcopy(self._data) + + @property + def attr_defs(self): + """Pointer to attribute definitions. + + Returns: + List[AbstractAttrDef]: Attribute definitions. + """ + + return list(self._attr_defs) + + @property + def origin_data(self): + return copy.deepcopy(self._origin_data) + + def data_to_store(self): + """Create new dictionary with data to store. + + Returns: + Dict[str, Any]: Attribute values that should be stored. + """ + + output = {} + for key in self._data: + output[key] = self[key] + + for key, attr_def in self._attr_defs_by_key.items(): + if key not in output: + output[key] = attr_def.default + return output + + def get_serialized_attr_defs(self): + """Serialize attribute definitions to json serializable types. + + Returns: + List[Dict[str, Any]]: Serialized attribute definitions. + """ + + return serialize_attr_defs(self._attr_defs) + + +class CreatorAttributeValues(AttributeValues): + """Creator specific attribute values of an instance. + + Args: + instance (CreatedInstance): Instance for which are values hold. + """ + + def __init__(self, instance, *args, **kwargs): + self.instance = instance + super(CreatorAttributeValues, self).__init__(*args, **kwargs) + + +class PublishAttributeValues(AttributeValues): + """Publish plugin specific attribute values. + + Values are for single plugin which can be on `CreatedInstance` + or context values stored on `CreateContext`. 
+ + Args: + publish_attributes(PublishAttributes): Wrapper for multiple publish + attributes is used as parent object. + """ + + def __init__(self, publish_attributes, *args, **kwargs): + self.publish_attributes = publish_attributes + super(PublishAttributeValues, self).__init__(*args, **kwargs) + + @property + def parent(self): + return self.publish_attributes.parent + + +class PublishAttributes: + """Wrapper for publish plugin attribute definitions. + + Cares about handling attribute definitions of multiple publish plugins. + Keep information about attribute definitions and their values. + + Args: + parent(CreatedInstance, CreateContext): Parent for which will be + data stored and from which are data loaded. + origin_data(dict): Loaded data by plugin class name. + attr_plugins(Union[List[pyblish.api.Plugin], None]): List of publish + plugins that may have defined attribute definitions. + """ + + def __init__(self, parent, origin_data, attr_plugins=None): + self.parent = parent + self._origin_data = copy.deepcopy(origin_data) + + attr_plugins = attr_plugins or [] + self.attr_plugins = attr_plugins + + self._data = copy.deepcopy(origin_data) + self._plugin_names_order = [] + self._missing_plugins = [] + + self.set_publish_plugins(attr_plugins) + + def __getitem__(self, key): + return self._data[key] + + def __contains__(self, key): + return key in self._data + + def keys(self): + return self._data.keys() + + def values(self): + return self._data.values() + + def items(self): + return self._data.items() + + def pop(self, key, default=None): + """Remove or reset value for plugin. + + Plugin values are reset to defaults if plugin is available but + data of plugin which was not found are removed. + + Args: + key(str): Plugin name. + default: Default value if plugin was not found. 
+ """ + + if key not in self._data: + return default + + if key in self._missing_plugins: + self._missing_plugins.remove(key) + removed_item = self._data.pop(key) + return removed_item.data_to_store() + + value_item = self._data[key] + # Prepare value to return + output = value_item.data_to_store() + # Reset values + value_item.reset_values() + return output + + def plugin_names_order(self): + """Plugin names order by their 'order' attribute.""" + + for name in self._plugin_names_order: + yield name + + def mark_as_stored(self): + self._origin_data = copy.deepcopy(self.data_to_store()) + + def data_to_store(self): + """Convert attribute values to "data to store".""" + + output = {} + for key, attr_value in self._data.items(): + output[key] = attr_value.data_to_store() + return output + + @property + def origin_data(self): + return copy.deepcopy(self._origin_data) + + def set_publish_plugins(self, attr_plugins): + """Set publish plugins attribute definitions.""" + + self._plugin_names_order = [] + self._missing_plugins = [] + self.attr_plugins = attr_plugins or [] + + origin_data = self._origin_data + data = self._data + self._data = {} + added_keys = set() + for plugin in attr_plugins: + output = plugin.convert_attribute_values(data) + if output is not None: + data = output + attr_defs = plugin.get_attribute_defs() + if not attr_defs: + continue + + key = plugin.__name__ + added_keys.add(key) + self._plugin_names_order.append(key) + + value = data.get(key) or {} + orig_value = copy.deepcopy(origin_data.get(key) or {}) + self._data[key] = PublishAttributeValues( + self, attr_defs, value, orig_value + ) + + for key, value in data.items(): + if key not in added_keys: + self._missing_plugins.append(key) + self._data[key] = PublishAttributeValues( + self, [], value, value + ) + + def serialize_attributes(self): + return { + "attr_defs": { + plugin_name: attrs_value.get_serialized_attr_defs() + for plugin_name, attrs_value in self._data.items() + }, + "plugin_names_order": self._plugin_names_order, + "missing_plugins": self._missing_plugins + } + + def deserialize_attributes(self, data): + self._plugin_names_order = data["plugin_names_order"] + self._missing_plugins = data["missing_plugins"] + + attr_defs = deserialize_attr_defs(data["attr_defs"]) + + origin_data = self._origin_data + data = self._data + self._data = {} + + added_keys = set() + for plugin_name, attr_defs_data in attr_defs.items(): + attr_defs = deserialize_attr_defs(attr_defs_data) + value = data.get(plugin_name) or {} + orig_value = copy.deepcopy(origin_data.get(plugin_name) or {}) + self._data[plugin_name] = PublishAttributeValues( + self, attr_defs, value, orig_value + ) + + for key, value in data.items(): + if key not in added_keys: + self._missing_plugins.append(key) + self._data[key] = PublishAttributeValues( + self, [], value, value + ) + + +class CreatedInstance: + """Instance entity with data that will be stored to workfile. + + I think `data` must be required argument containing all minimum information + about instance like "folderPath" and "task" and all data used for filling + product name as creators may have custom data for product name filling. + + Notes: + Object have 2 possible initialization. One using 'creator' object which + is recommended for api usage. Second by passing information about + creator. + + Args: + product_type (str): Product type that will be created. + product_name (str): Name of product that will be created. 
+ data (Dict[str, Any]): Data used for filling product name or override + data from already existing instance. + creator (Union[BaseCreator, None]): Creator responsible for instance. + creator_identifier (str): Identifier of creator plugin. + creator_label (str): Creator plugin label. + group_label (str): Default group label from creator plugin. + creator_attr_defs (List[AbstractAttrDef]): Attribute definitions from + creator. + """ + + # Keys that can't be changed or removed from data after loading using + # creator. + # - 'creator_attributes' and 'publish_attributes' can change values of + # their individual children but not on their own + __immutable_keys = ( + "id", + "instance_id", + "product_type", + "creator_identifier", + "creator_attributes", + "publish_attributes" + ) + + def __init__( + self, + product_type, + product_name, + data, + creator=None, + creator_identifier=None, + creator_label=None, + group_label=None, + creator_attr_defs=None, + ): + if creator is not None: + creator_identifier = creator.identifier + group_label = creator.get_group_label() + creator_label = creator.label + creator_attr_defs = creator.get_instance_attr_defs() + + self._creator_label = creator_label + self._group_label = group_label or creator_identifier + + # Instance members may have actions on them + # TODO implement members logic + self._members = [] + + # Data that can be used for lifetime of object + self._transient_data = {} + + # Create a copy of passed data to avoid changing them on the fly + data = copy.deepcopy(data or {}) + + # Pop dictionary values that will be converted to objects to be able + # catch changes + orig_creator_attributes = data.pop("creator_attributes", None) or {} + orig_publish_attributes = data.pop("publish_attributes", None) or {} + + # Store original value of passed data + self._orig_data = copy.deepcopy(data) + + # Pop 'productType' and 'productName' to prevent unexpected changes + data.pop("productType", None) + data.pop("productName", None) + # Backwards compatibility with OpenPype instances + data.pop("family", None) + data.pop("subset", None) + + asset_name = data.pop("asset", None) + if "folderPath" not in data: + data["folderPath"] = asset_name + + # QUESTION Does it make sense to have data stored as ordered dict? + self._data = collections.OrderedDict() + # QUESTION Do we need this "id" information on instance? 
+        item_id = data.get("id")
+        # TODO use only 'AYON_INSTANCE_ID' when all hosts support it
+        if item_id not in {AYON_INSTANCE_ID, AVALON_INSTANCE_ID}:
+            item_id = AVALON_INSTANCE_ID
+        self._data["id"] = item_id
+        self._data["productType"] = product_type
+        self._data["productName"] = product_name
+        self._data["active"] = data.get("active", True)
+        self._data["creator_identifier"] = creator_identifier
+
+        # Pop from source data all keys that are defined in `_data` before
+        # this moment and throw their values away
+        # - they should be the same, and if they are not, they should not
+        #   change already set values
+        for key in self._data.keys():
+            if key in data:
+                data.pop(key)
+
+        self._data["variant"] = self._data.get("variant") or ""
+        # Stored creator specific attribute values
+        # {key: value}
+        creator_values = copy.deepcopy(orig_creator_attributes)
+
+        self._data["creator_attributes"] = CreatorAttributeValues(
+            self,
+            list(creator_attr_defs),
+            creator_values,
+            orig_creator_attributes
+        )
+
+        # Stored publish specific attribute values
+        # {<plugin name>: {key: value}}
+        # - must be set using 'set_publish_plugins'
+        self._data["publish_attributes"] = PublishAttributes(
+            self, orig_publish_attributes, None
+        )
+        if data:
+            self._data.update(data)
+
+        if not self._data.get("instance_id"):
+            self._data["instance_id"] = str(uuid4())
+
+        self._folder_is_valid = self.has_set_folder
+        self._task_is_valid = self.has_set_task
+
+    def __str__(self):
+        return (
+            "<CreatedInstance {product[name]}"
+            " ({product[type]}[{creator_identifier}])> {data}"
+        ).format(
+            creator_identifier=self.creator_identifier,
+            product={"name": self.product_name, "type": self.product_type},
+            data=str(self._data)
+        )
+
+    # --- Dictionary like methods ---
+    def __getitem__(self, key):
+        return self._data[key]
+
+    def __contains__(self, key):
+        return key in self._data
+
+    def __setitem__(self, key, value):
+        # Validate immutable keys
+        if key not in self.__immutable_keys:
+            self._data[key] = value
+
+        elif value != self._data.get(key):
+            # Raise exception if key is immutable and value has changed
+            raise ImmutableKeyError(key)
+
+    def get(self, key, default=None):
+        return self._data.get(key, default)
+
+    def pop(self, key, *args, **kwargs):
+        # Raise exception when trying to pop a key which is immutable
+        if key in self.__immutable_keys:
+            raise ImmutableKeyError(key)
+
+        self._data.pop(key, *args, **kwargs)
+
+    def keys(self):
+        return self._data.keys()
+
+    def values(self):
+        return self._data.values()
+
+    def items(self):
+        return self._data.items()
+    # ------
+
+    @property
+    def product_type(self):
+        return self._data["productType"]
+
+    @property
+    def product_name(self):
+        return self._data["productName"]
+
+    @property
+    def label(self):
+        label = self._data.get("label")
+        if not label:
+            label = self.product_name
+        return label
+
+    @property
+    def group_label(self):
+        label = self._data.get("group")
+        if label:
+            return label
+        return self._group_label
+
+    @property
+    def origin_data(self):
+        output = copy.deepcopy(self._orig_data)
+        output["creator_attributes"] = self.creator_attributes.origin_data
+        output["publish_attributes"] = self.publish_attributes.origin_data
+        return output
+
+    @property
+    def creator_identifier(self):
+        return self._data["creator_identifier"]
+
+    @property
+    def creator_label(self):
+        return self._creator_label or self.creator_identifier
+
+    @property
+    def id(self):
+        """Instance identifier.
+
+        Returns:
+            str: UUID of instance.
+        """
+
+        return self._data["instance_id"]
+
+    @property
+    def data(self):
+        """Legacy access to data.
+
+        Access to data is needed to modify values.
+
+        Returns:
+            CreatedInstance: Object can be used as a dictionary but with
+                validation of immutable keys.
+        """
+
+        return self
+
+    @property
+    def transient_data(self):
+        """Data stored for lifetime of instance object.
+
+        These data are not stored to the scene and will be lost on object
+        deletion.
+
+        Can be used to store objects. In some host implementations it is not
+        possible to reference an object in the scene by a unique identifier
+        (e.g. a node in Fusion). In that case it is handy to store the object
+        here. Should be used that way only if instance data are stored on the
+        node itself.
+
+        Returns:
+            Dict[str, Any]: Dictionary object where you can store data related
+                to instance for lifetime of instance object.
+        """
+
+        return self._transient_data
+
+    def changes(self):
+        """Calculate and return changes."""
+
+        return TrackChangesItem(self.origin_data, self.data_to_store())
+
+    def mark_as_stored(self):
+        """Should be called when instance data are stored.
+
+        Origin data are replaced by current data so changes are cleared.
+        """
+
+        orig_keys = set(self._orig_data.keys())
+        for key, value in self._data.items():
+            orig_keys.discard(key)
+            if key in ("creator_attributes", "publish_attributes"):
+                continue
+            self._orig_data[key] = copy.deepcopy(value)
+
+        for key in orig_keys:
+            self._orig_data.pop(key)
+
+        self.creator_attributes.mark_as_stored()
+        self.publish_attributes.mark_as_stored()
+
+    @property
+    def creator_attributes(self):
+        return self._data["creator_attributes"]
+
+    @property
+    def creator_attribute_defs(self):
+        """Attribute definitions defined by creator plugin.
+
+        Returns:
+            List[AbstractAttrDef]: Attribute definitions.
+        """
+
+        return self.creator_attributes.attr_defs
+
+    @property
+    def publish_attributes(self):
+        return self._data["publish_attributes"]
+
+    def data_to_store(self):
+        """Collect data that contain json parsable types.
+
+        It is possible to recreate the instance using these data.
+
+        Todos:
+            We probably don't need OrderedDict. When data are loaded they
+            are not ordered anymore.
+
+        Returns:
+            OrderedDict: Ordered dictionary with instance data.
+        """
+
+        output = collections.OrderedDict()
+        for key, value in self._data.items():
+            if key in ("creator_attributes", "publish_attributes"):
+                continue
+            output[key] = value
+
+        output["creator_attributes"] = self.creator_attributes.data_to_store()
+        output["publish_attributes"] = self.publish_attributes.data_to_store()
+
+        return output
+
+    @classmethod
+    def from_existing(cls, instance_data, creator):
+        """Convert instance data from workfile to CreatedInstance.
+
+        Args:
+            instance_data (Dict[str, Any]): Data in a structure ready for
+                'CreatedInstance' object.
+            creator (BaseCreator): Creator plugin which created the instance
+                or to which the instance belongs.
+        """
+
+        instance_data = copy.deepcopy(instance_data)
+
+        product_type = instance_data.get("productType")
+        if product_type is None:
+            product_type = instance_data.get("family")
+        if product_type is None:
+            product_type = creator.product_type
+        product_name = instance_data.get("productName")
+        if product_name is None:
+            product_name = instance_data.get("subset")
+
+        return cls(
+            product_type, product_name, instance_data, creator
+        )
+
+    def set_publish_plugins(self, attr_plugins):
+        """Set publish plugins with attribute definitions.
+
+        This method should be called only from 'CreateContext'.
+ + Args: + attr_plugins (List[pyblish.api.Plugin]): Pyblish plugins which + inherit from 'AYONPyblishPluginMixin' and may contain + attribute definitions. + """ + + self.publish_attributes.set_publish_plugins(attr_plugins) + + def add_members(self, members): + """Currently unused method.""" + + for member in members: + if member not in self._members: + self._members.append(member) + + def serialize_for_remote(self): + """Serialize object into data to be possible recreated object. + + Returns: + Dict[str, Any]: Serialized data. + """ + + creator_attr_defs = self.creator_attributes.get_serialized_attr_defs() + publish_attributes = self.publish_attributes.serialize_attributes() + return { + "data": self.data_to_store(), + "orig_data": self.origin_data, + "creator_attr_defs": creator_attr_defs, + "publish_attributes": publish_attributes, + "creator_label": self._creator_label, + "group_label": self._group_label, + } + + @classmethod + def deserialize_on_remote(cls, serialized_data): + """Convert instance data to CreatedInstance. + + This is fake instance in remote process e.g. in UI process. The creator + is not a full creator and should not be used for calling methods when + instance is created from this method (matters on implementation). + + Args: + serialized_data (Dict[str, Any]): Serialized data for remote + recreating. Should contain 'data' and 'orig_data'. + """ + + instance_data = copy.deepcopy(serialized_data["data"]) + creator_identifier = instance_data["creator_identifier"] + + product_type = instance_data["productType"] + product_name = instance_data.get("productName", None) + + creator_label = serialized_data["creator_label"] + group_label = serialized_data["group_label"] + creator_attr_defs = deserialize_attr_defs( + serialized_data["creator_attr_defs"] + ) + publish_attributes = serialized_data["publish_attributes"] + + obj = cls( + product_type, + product_name, + instance_data, + creator_identifier=creator_identifier, + creator_label=creator_label, + group_label=group_label, + creator_attr_defs=creator_attr_defs + ) + obj._orig_data = serialized_data["orig_data"] + obj.publish_attributes.deserialize_attributes(publish_attributes) + + return obj + + # Context validation related methods/properties + @property + def has_set_folder(self): + """Folder path is set in data.""" + + return "folderPath" in self._data + + @property + def has_set_task(self): + """Task name is set in data.""" + + return "task" in self._data + + @property + def has_valid_context(self): + """Context data are valid for publishing.""" + + return self.has_valid_folder and self.has_valid_task + + @property + def has_valid_folder(self): + """Folder set in context exists in project.""" + + if not self.has_set_folder: + return False + return self._folder_is_valid + + @property + def has_valid_task(self): + """Task set in context exists in project.""" + + if not self.has_set_task: + return False + return self._task_is_valid + + def set_folder_invalid(self, invalid): + # TODO replace with `set_folder_path` + self._folder_is_valid = not invalid + + def set_task_invalid(self, invalid): + # TODO replace with `set_task_name` + self._task_is_valid = not invalid From b0abbf36fb57d6238a1ee8a29669301c8697ede8 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Mon, 12 Aug 2024 12:19:57 +0200 Subject: [PATCH 04/16] moved exceptions from product name too --- client/ayon_core/pipeline/create/__init__.py | 6 ++++-- client/ayon_core/pipeline/create/exceptions.py | 13 +++++++++++++ 
client/ayon_core/pipeline/create/product_name.py | 15 +-------------- 3 files changed, 18 insertions(+), 16 deletions(-) diff --git a/client/ayon_core/pipeline/create/__init__.py b/client/ayon_core/pipeline/create/__init__.py index bb05bc6a09..fa8d639c6f 100644 --- a/client/ayon_core/pipeline/create/__init__.py +++ b/client/ayon_core/pipeline/create/__init__.py @@ -17,6 +17,8 @@ CreatorsSaveFailed, CreatorsRemoveFailed, CreatorsOperationFailed, + TaskNotSetError, + TemplateFillError, ) from .structures import CreatedInstance from .utils import ( @@ -25,7 +27,6 @@ ) from .product_name import ( - TaskNotSetError, get_product_name, get_product_name_template, ) @@ -74,13 +75,14 @@ "CreatorsSaveFailed", "CreatorsRemoveFailed", "CreatorsOperationFailed", + "TaskNotSetError", + "TemplateFillError", "CreatedInstance", "get_last_versions_for_instances", "get_next_versions_for_instances", - "TaskNotSetError", "get_product_name", "get_product_name_template", diff --git a/client/ayon_core/pipeline/create/exceptions.py b/client/ayon_core/pipeline/create/exceptions.py index 24264840cb..8910d3fa09 100644 --- a/client/ayon_core/pipeline/create/exceptions.py +++ b/client/ayon_core/pipeline/create/exceptions.py @@ -112,3 +112,16 @@ def __init__(self, failed_info): msg = "Failed to create instances" super().__init__(msg, failed_info) + +class TaskNotSetError(KeyError): + def __init__(self, msg=None): + if not msg: + msg = "Creator's product name template requires task name." + super().__init__(msg) + + +class TemplateFillError(Exception): + def __init__(self, msg=None): + if not msg: + msg = "Creator's product name template is missing key value." + super().__init__(msg) diff --git a/client/ayon_core/pipeline/create/product_name.py b/client/ayon_core/pipeline/create/product_name.py index 8a08bdc36c..0c6fb70169 100644 --- a/client/ayon_core/pipeline/create/product_name.py +++ b/client/ayon_core/pipeline/create/product_name.py @@ -4,20 +4,7 @@ from ayon_core.lib import filter_profiles, prepare_template_data from .constants import DEFAULT_PRODUCT_TEMPLATE - - -class TaskNotSetError(KeyError): - def __init__(self, msg=None): - if not msg: - msg = "Creator's product name template requires task name." - super(TaskNotSetError, self).__init__(msg) - - -class TemplateFillError(Exception): - def __init__(self, msg=None): - if not msg: - msg = "Creator's product name template is missing key value." - super(TemplateFillError, self).__init__(msg) +from .exceptions import TaskNotSetError, TemplateFillError def get_product_name_template( From 5d4e086978e7411b0705fcb340c3e8bf9ba2f9ff Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Mon, 12 Aug 2024 12:21:50 +0200 Subject: [PATCH 05/16] remove python 2 compatibility --- client/ayon_core/pipeline/create/changes.py | 2 +- client/ayon_core/pipeline/create/creator_plugins.py | 2 +- client/ayon_core/pipeline/create/legacy_create.py | 2 +- client/ayon_core/pipeline/create/structures.py | 8 ++++---- 4 files changed, 7 insertions(+), 7 deletions(-) diff --git a/client/ayon_core/pipeline/create/changes.py b/client/ayon_core/pipeline/create/changes.py index 217478ee30..c8b81cac48 100644 --- a/client/ayon_core/pipeline/create/changes.py +++ b/client/ayon_core/pipeline/create/changes.py @@ -3,7 +3,7 @@ _EMPTY_VALUE = object() -class TrackChangesItem(object): +class TrackChangesItem: """Helper object to track changes in data. 
Has access to full old and new data and will create deep copy of them, diff --git a/client/ayon_core/pipeline/create/creator_plugins.py b/client/ayon_core/pipeline/create/creator_plugins.py index 1e09eb62a1..61c10ee736 100644 --- a/client/ayon_core/pipeline/create/creator_plugins.py +++ b/client/ayon_core/pipeline/create/creator_plugins.py @@ -644,7 +644,7 @@ def __init__(self, *args, **kwargs): cls._get_default_variant_wrap, cls._set_default_variant_wrap ) - super(Creator, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) @property def show_order(self): diff --git a/client/ayon_core/pipeline/create/legacy_create.py b/client/ayon_core/pipeline/create/legacy_create.py index fc24bcf934..ec9b23ac62 100644 --- a/client/ayon_core/pipeline/create/legacy_create.py +++ b/client/ayon_core/pipeline/create/legacy_create.py @@ -14,7 +14,7 @@ from .product_name import get_product_name -class LegacyCreator(object): +class LegacyCreator: """Determine how assets are created""" label = None product_type = None diff --git a/client/ayon_core/pipeline/create/structures.py b/client/ayon_core/pipeline/create/structures.py index 7fe854c4fc..41c130214d 100644 --- a/client/ayon_core/pipeline/create/structures.py +++ b/client/ayon_core/pipeline/create/structures.py @@ -16,7 +16,7 @@ from .changes import TrackChangesItem -class ConvertorItem(object): +class ConvertorItem: """Item representing convertor plugin. Args: @@ -69,7 +69,7 @@ def add_action(self, label, callback): }) -class AttributeValues(object): +class AttributeValues: """Container which keep values of Attribute definitions. Goal is to have one object which hold values of attribute definitions for @@ -210,7 +210,7 @@ class CreatorAttributeValues(AttributeValues): def __init__(self, instance, *args, **kwargs): self.instance = instance - super(CreatorAttributeValues, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) class PublishAttributeValues(AttributeValues): @@ -226,7 +226,7 @@ class PublishAttributeValues(AttributeValues): def __init__(self, publish_attributes, *args, **kwargs): self.publish_attributes = publish_attributes - super(PublishAttributeValues, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) @property def parent(self): From 7ef60ad4c91af830376d3ffeccd825dd5133e3f1 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Mon, 12 Aug 2024 12:22:03 +0200 Subject: [PATCH 06/16] fix 'update' method --- client/ayon_core/pipeline/create/structures.py | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/client/ayon_core/pipeline/create/structures.py b/client/ayon_core/pipeline/create/structures.py index 41c130214d..4f7caa6e11 100644 --- a/client/ayon_core/pipeline/create/structures.py +++ b/client/ayon_core/pipeline/create/structures.py @@ -112,10 +112,7 @@ def __setitem__(self, key, value): if key not in self._attr_defs_by_key: raise KeyError("Key \"{}\" was not found.".format(key)) - old_value = self._data.get(key) - if old_value == value: - return - self._data[key] = value + self.update({key: value}) def __getitem__(self, key): if key not in self._attr_defs_by_key: @@ -142,8 +139,12 @@ def items(self): yield key, self._data.get(key) def update(self, value): - for _key, _value in dict(value): - self[_key] = _value + changes = {} + for _key, _value in dict(value).items(): + if _key in self._data and self._data.get(_key) == _value: + continue + self._data[_key] = _value + changes[_key] = _value def pop(self, key, default=None): 
value = self._data.pop(key, default) From d5602cb89a3123dcc5d7b457b40e652db9133c78 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Mon, 12 Aug 2024 12:27:30 +0200 Subject: [PATCH 07/16] simpler import --- client/ayon_core/pipeline/create/context.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/client/ayon_core/pipeline/create/context.py b/client/ayon_core/pipeline/create/context.py index a11bc311dc..76eb620b4d 100644 --- a/client/ayon_core/pipeline/create/context.py +++ b/client/ayon_core/pipeline/create/context.py @@ -14,9 +14,7 @@ from ayon_core.settings import get_project_settings from ayon_core.lib import is_func_signature_supported -from ayon_core.lib.attribute_definitions import ( - get_default_values, -) +from ayon_core.lib.attribute_definitions import get_default_values from ayon_core.host import IPublishHost, IWorkfileHost from ayon_core.pipeline import Anatomy from ayon_core.pipeline.plugin_discover import DiscoverResult From 4b8b57e39a47cf446b9368f6a8de146d0de04b6c Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Mon, 12 Aug 2024 12:27:41 +0200 Subject: [PATCH 08/16] remove unecessary line --- client/ayon_core/pipeline/create/context.py | 1 - 1 file changed, 1 deletion(-) diff --git a/client/ayon_core/pipeline/create/context.py b/client/ayon_core/pipeline/create/context.py index 76eb620b4d..f5ba7b4774 100644 --- a/client/ayon_core/pipeline/create/context.py +++ b/client/ayon_core/pipeline/create/context.py @@ -373,7 +373,6 @@ def get_current_task_entity(self): self._current_task_entity = task_entity return copy.deepcopy(self._current_task_entity) - def get_current_workfile_path(self): """Workfile path which was opened on context reset. 
From 3296079df689acff6d2d950043428b3ebaaa1b91 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Mon, 12 Aug 2024 12:27:50 +0200 Subject: [PATCH 09/16] make sure exc_info is defined --- client/ayon_core/pipeline/create/context.py | 1 + 1 file changed, 1 insertion(+) diff --git a/client/ayon_core/pipeline/create/context.py b/client/ayon_core/pipeline/create/context.py index f5ba7b4774..2326a829e3 100644 --- a/client/ayon_core/pipeline/create/context.py +++ b/client/ayon_core/pipeline/create/context.py @@ -871,6 +871,7 @@ def _create_with_unified_error( add_traceback = False result = None fail_info = None + exc_info = None success = False try: From 04b37b83c64e294da9255698c01c227fe443cb29 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Mon, 12 Aug 2024 12:27:59 +0200 Subject: [PATCH 10/16] fix bulk processing --- client/ayon_core/pipeline/create/context.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/client/ayon_core/pipeline/create/context.py b/client/ayon_core/pipeline/create/context.py index 2326a829e3..b3a46bb778 100644 --- a/client/ayon_core/pipeline/create/context.py +++ b/client/ayon_core/pipeline/create/context.py @@ -965,9 +965,11 @@ def bulk_instances_collection(self): finally: self._bulk_counter -= 1 - # Trigger validation if there is no more context manager for bulk - # instance validation - if self._bulk_counter == 0: + # Trigger validation if there is no more context manager for bulk + # instance validation + if self._bulk_counter != 0: + return + ( self._bulk_instances_to_process, instances_to_validate From 102ec52dd0c88d466e33adc947f3df1ffff33ee0 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Wed, 28 Aug 2024 17:30:18 +0200 Subject: [PATCH 11/16] add exceptions moved to different file --- client/ayon_core/pipeline/create/context.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/client/ayon_core/pipeline/create/context.py b/client/ayon_core/pipeline/create/context.py index b3a46bb778..69103159c6 100644 --- a/client/ayon_core/pipeline/create/context.py +++ b/client/ayon_core/pipeline/create/context.py @@ -39,6 +39,13 @@ discover_convertor_plugins, ) +# Import of exceptions that were moved to different file +from .exceptions import ( + ImmutableKeyError, + CreatorsOperationFailed, + ConvertorsOperationFailed, +) # noqa: F401 + # Changes of instances and context are send as tuple of 2 information UpdateData = collections.namedtuple("UpdateData", ["instance", "changes"]) _NOT_SET = object() From f414d73f7dcd5d4359795154028c3ab355cd5dd3 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Wed, 28 Aug 2024 17:58:34 +0200 Subject: [PATCH 12/16] use shorter import --- client/ayon_core/tools/publisher/abstract.py | 2 +- client/ayon_core/tools/publisher/models/create.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/client/ayon_core/tools/publisher/abstract.py b/client/ayon_core/tools/publisher/abstract.py index 768f4b052f..ce9c6ac1ed 100644 --- a/client/ayon_core/tools/publisher/abstract.py +++ b/client/ayon_core/tools/publisher/abstract.py @@ -14,7 +14,7 @@ from ayon_core.lib import AbstractAttrDef from ayon_core.host import HostBase from ayon_core.pipeline.create import CreateContext, CreatedInstance -from ayon_core.pipeline.create.context import ConvertorItem +from ayon_core.pipeline.create import ConvertorItem from ayon_core.tools.common_models 
import ( FolderItem, TaskItem, diff --git a/client/ayon_core/tools/publisher/models/create.py b/client/ayon_core/tools/publisher/models/create.py index ab2bf07614..9fe114f4bd 100644 --- a/client/ayon_core/tools/publisher/models/create.py +++ b/client/ayon_core/tools/publisher/models/create.py @@ -18,7 +18,7 @@ CreateContext, CreatedInstance, ) -from ayon_core.pipeline.create.context import ( +from ayon_core.pipeline.create import ( CreatorsOperationFailed, ConvertorsOperationFailed, ConvertorItem, From a4ed32e3e114480e0fe54624b664b04c90d21e92 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Wed, 28 Aug 2024 18:12:56 +0200 Subject: [PATCH 13/16] added strucctures to init file --- client/ayon_core/pipeline/create/__init__.py | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) diff --git a/client/ayon_core/pipeline/create/__init__.py b/client/ayon_core/pipeline/create/__init__.py index fa8d639c6f..ced43528eb 100644 --- a/client/ayon_core/pipeline/create/__init__.py +++ b/client/ayon_core/pipeline/create/__init__.py @@ -20,7 +20,14 @@ TaskNotSetError, TemplateFillError, ) -from .structures import CreatedInstance +from .structures import ( + CreatedInstance, + ConvertorItem, + AttributeValues, + CreatorAttributeValues, + PublishAttributeValues, + PublishAttributes, +) from .utils import ( get_last_versions_for_instances, get_next_versions_for_instances, @@ -79,6 +86,11 @@ "TemplateFillError", "CreatedInstance", + "ConvertorItem", + "AttributeValues", + "CreatorAttributeValues", + "PublishAttributeValues", + "PublishAttributes", "get_last_versions_for_instances", "get_next_versions_for_instances", From e33f8670fd9cc3f1c3c99cfad8d989b5c0cd6cf6 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Wed, 28 Aug 2024 18:13:06 +0200 Subject: [PATCH 14/16] fake import classes from structures too --- client/ayon_core/pipeline/create/context.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/client/ayon_core/pipeline/create/context.py b/client/ayon_core/pipeline/create/context.py index 69103159c6..71ba3b7799 100644 --- a/client/ayon_core/pipeline/create/context.py +++ b/client/ayon_core/pipeline/create/context.py @@ -39,12 +39,18 @@ discover_convertor_plugins, ) -# Import of exceptions that were moved to different file +# Import of functions and classes that were moved to different file +# TODO Should be removed in future release - Added 24/08/28, 0.4.3-dev.1 from .exceptions import ( ImmutableKeyError, CreatorsOperationFailed, ConvertorsOperationFailed, ) # noqa: F401 +from .structures import ( + AttributeValues, + CreatorAttributeValues, + PublishAttributeValues, +) # noqa: F401 # Changes of instances and context are send as tuple of 2 information UpdateData = collections.namedtuple("UpdateData", ["instance", "changes"]) From 0ce2bf6633bdfc1ac7ef6b6ec9fbcf2c8cd64066 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Wed, 28 Aug 2024 18:25:40 +0200 Subject: [PATCH 15/16] changed noqa location --- client/ayon_core/pipeline/create/context.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/client/ayon_core/pipeline/create/context.py b/client/ayon_core/pipeline/create/context.py index 71ba3b7799..3f067427fa 100644 --- a/client/ayon_core/pipeline/create/context.py +++ b/client/ayon_core/pipeline/create/context.py @@ -42,15 +42,15 @@ # Import of functions and classes that were moved to different file # 
TODO Should be removed in future release - Added 24/08/28, 0.4.3-dev.1 from .exceptions import ( - ImmutableKeyError, - CreatorsOperationFailed, - ConvertorsOperationFailed, -) # noqa: F401 + ImmutableKeyError, # noqa: F401 + CreatorsOperationFailed, # noqa: F401 + ConvertorsOperationFailed, # noqa: F401 +) from .structures import ( - AttributeValues, - CreatorAttributeValues, - PublishAttributeValues, -) # noqa: F401 + AttributeValues, # noqa: F401 + CreatorAttributeValues, # noqa: F401 + PublishAttributeValues, # noqa: F401 +) # Changes of instances and context are send as tuple of 2 information UpdateData = collections.namedtuple("UpdateData", ["instance", "changes"]) From 237e17b658d2cf016614e8c8ac30a69a2e0208e0 Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Wed, 28 Aug 2024 18:34:20 +0200 Subject: [PATCH 16/16] merge import --- client/ayon_core/tools/publisher/abstract.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/client/ayon_core/tools/publisher/abstract.py b/client/ayon_core/tools/publisher/abstract.py index ce9c6ac1ed..362fa38882 100644 --- a/client/ayon_core/tools/publisher/abstract.py +++ b/client/ayon_core/tools/publisher/abstract.py @@ -13,8 +13,11 @@ from ayon_core.lib import AbstractAttrDef from ayon_core.host import HostBase -from ayon_core.pipeline.create import CreateContext, CreatedInstance -from ayon_core.pipeline.create import ConvertorItem +from ayon_core.pipeline.create import ( + CreateContext, + CreatedInstance, + ConvertorItem, +) from ayon_core.tools.common_models import ( FolderItem, TaskItem,
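
After the full series, the relocated exceptions and structures are re-exported from the `ayon_core.pipeline.create` package, which is the import path the publisher tool changes above switch to. Below is a minimal, hypothetical sketch of how downstream code could consume those re-exported names once the series is applied; `run_creator_operation` and its plain-print reporting are illustrative placeholders, not part of the patches, while the imported names and the `failed_info` attribute come from the series itself.

from ayon_core.pipeline.create import (
    CreatorsOperationFailed,
    ConvertorsOperationFailed,
    TaskNotSetError,
)


def run_creator_operation(operation):
    # Hypothetical helper: 'operation' is any callable that drives creator
    # or convertor plugins (e.g. collection through a CreateContext).
    try:
        return operation()
    except CreatorsOperationFailed as exc:
        # 'failed_info' is the list of per-plugin failure payloads these
        # exception classes carry.
        for failed in exc.failed_info:
            print("Creator operation failed:", failed)
    except ConvertorsOperationFailed as exc:
        for failed in exc.failed_info:
            print("Convertor operation failed:", failed)
    except TaskNotSetError:
        print("Product name template requires a task name.")

Because context.py keeps re-importing the moved classes under `# noqa: F401` with a removal TODO, the old deep imports keep working during a deprecation window while new code can use the shorter package-level imports shown here.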