From d1a883be55a1f281937cba404f589a56b1b27eaa Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 16 Oct 2024 18:50:19 +0200 Subject: [PATCH 001/153] New Settings for CollectJobInfoItem These settings should be base of generic JobInfo values for Deadline submission. They should contain variables previously contained in Submit* Settings. Some of them should be exposed to Publisher UI as artist overrides. --- server/settings/publish_plugins.py | 263 ++++++++++------------------- 1 file changed, 93 insertions(+), 170 deletions(-) diff --git a/server/settings/publish_plugins.py b/server/settings/publish_plugins.py index af341bfedd..daf99d09f2 100644 --- a/server/settings/publish_plugins.py +++ b/server/settings/publish_plugins.py @@ -4,9 +4,25 @@ BaseSettingsModel, SettingsField, ensure_unique_names, + task_types_enum, ) +class LimitGroupsSubmodel(BaseSettingsModel): + _layout = "expanded" + name: str = SettingsField(title="Name") + value: list[str] = SettingsField( + default_factory=list, + title="Limit Groups" + ) + + +class EnvSearchReplaceSubmodel(BaseSettingsModel): + _layout = "compact" + name: str = SettingsField(title="Name") + value: str = SettingsField(title="Value") + + class CollectDeadlinePoolsModel(BaseSettingsModel): """Settings Deadline default pools.""" @@ -15,6 +31,79 @@ class CollectDeadlinePoolsModel(BaseSettingsModel): secondary_pool: str = SettingsField(title="Secondary Pool") +def extract_jobinfo_overrides_enum(): + return [ + {"label": "Frames per Task", "value": "chunk_size"}, + {"label": "Priority", "value": "priority"}, + {"label": "Group", "value": "group"}, + {"label": "Limit groups", "value": "limit_groups"}, + {"label": "Delay job (timecode dd:hh:mm:ss)", "value": "job_delay"}, + {"label": "Group", "value": "group"}, + + ] + + +class CollectJobInfoItem(BaseSettingsModel): + _layout = "expanded" + host_names: list[str] = SettingsField( + default_factory=list, + title="Host names" + ) + task_types: list[str] = SettingsField( + default_factory=list, + title="Task types", + enum_resolver=task_types_enum + ) + task_names: list[str] = SettingsField( + default_factory=list, + title="Task names" + ) + + chunk_size: int = SettingsField(999, title="Frames per Task") + priority: int = SettingsField(50, title="Priority") + group: str = SettingsField("", title="Group") + limit_groups: list[LimitGroupsSubmodel] = SettingsField( + default_factory=list, + title="Limit Groups", + ) + concurrent_tasks: int = SettingsField( + 1, title="Number of concurrent tasks") + department: str = SettingsField("", title="Department") + use_gpu: bool = SettingsField("", title="Use GPU") + job_delay: str = SettingsField( + "", title="Delay job", + placeholder="dd:hh:mm:ss" + ) + use_published: bool = SettingsField(True, title="Use Published scene") + asset_dependencies: bool = SettingsField(True, title="Use Asset dependencies") + workfile_dependency: bool = SettingsField(True, title="Workfile Dependency") + multiprocess: bool = SettingsField(False, title="Multiprocess") + + env_allowed_keys: list[str] = SettingsField( + default_factory=list, + title="Allowed environment keys", + description="Pass selected environment variables with current value" + ) + env_search_replace_values: list[EnvSearchReplaceSubmodel] = SettingsField( + default_factory=list, + title="Search & replace in environment values", + description="Replace string values in 'Name' with value from 'Value'" + ) + overrides: list[str] = SettingsField( + enum_resolver=extract_jobinfo_overrides_enum, + title="Exposed Overrides", + 
description=( + "Expose the attribute in this list to the user when publishing." + ) + ) + + +class CollectJobInfoModel(BaseSettingsModel): + _isGroup = True + enabled: bool = SettingsField(False) + profiles: list[CollectJobInfoItem] = SettingsField(default_factory=list) + + class ValidateExpectedFilesModel(BaseSettingsModel): enabled: bool = SettingsField(True, title="Enabled") active: bool = SettingsField(True, title="Active") @@ -56,18 +145,11 @@ class MayaSubmitDeadlineModel(BaseSettingsModel): enabled: bool = SettingsField(title="Enabled") optional: bool = SettingsField(title="Optional") active: bool = SettingsField(title="Active") - use_published: bool = SettingsField(title="Use Published scene") import_reference: bool = SettingsField( title="Use Scene with Imported Reference" ) - asset_dependencies: bool = SettingsField(title="Use Asset dependencies") - priority: int = SettingsField(title="Priority") tile_priority: int = SettingsField(title="Tile Priority") - group: str = SettingsField(title="Group") - limit: list[str] = SettingsField( - default_factory=list, - title="Limit Groups" - ) + tile_assembler_plugin: str = SettingsField( title="Tile Assembler Plugin", enum_resolver=tile_assembler_enum, @@ -99,25 +181,6 @@ class MaxSubmitDeadlineModel(BaseSettingsModel): enabled: bool = SettingsField(True) optional: bool = SettingsField(title="Optional") active: bool = SettingsField(title="Active") - use_published: bool = SettingsField(title="Use Published scene") - priority: int = SettingsField(title="Priority") - chunk_size: int = SettingsField(title="Frame per Task") - group: str = SettingsField("", title="Group Name") - - -class EnvSearchReplaceSubmodel(BaseSettingsModel): - _layout = "compact" - name: str = SettingsField(title="Name") - value: str = SettingsField(title="Value") - - -class LimitGroupsSubmodel(BaseSettingsModel): - _layout = "expanded" - name: str = SettingsField(title="Name") - value: list[str] = SettingsField( - default_factory=list, - title="Limit Groups" - ) def fusion_deadline_plugin_enum(): @@ -142,12 +205,9 @@ class FusionSubmitDeadlineModel(BaseSettingsModel): enabled: bool = SettingsField(True, title="Enabled") optional: bool = SettingsField(False, title="Optional") active: bool = SettingsField(True, title="Active") - priority: int = SettingsField(50, title="Priority") - chunk_size: int = SettingsField(10, title="Frame per Task") concurrent_tasks: int = SettingsField( 1, title="Number of concurrent tasks" ) - group: str = SettingsField("", title="Group Name") plugin: str = SettingsField("Fusion", enum_resolver=fusion_deadline_plugin_enum, title="Deadline Plugin") @@ -159,38 +219,6 @@ class NukeSubmitDeadlineModel(BaseSettingsModel): enabled: bool = SettingsField(title="Enabled") optional: bool = SettingsField(title="Optional") active: bool = SettingsField(title="Active") - priority: int = SettingsField(title="Priority") - chunk_size: int = SettingsField(title="Chunk Size") - concurrent_tasks: int = SettingsField(title="Number of concurrent tasks") - group: str = SettingsField(title="Group") - department: str = SettingsField(title="Department") - use_gpu: bool = SettingsField(title="Use GPU") - workfile_dependency: bool = SettingsField(title="Workfile Dependency") - use_published_workfile: bool = SettingsField( - title="Use Published Workfile" - ) - - env_allowed_keys: list[str] = SettingsField( - default_factory=list, - title="Allowed environment keys" - ) - - env_search_replace_values: list[EnvSearchReplaceSubmodel] = SettingsField( - default_factory=list, - 
title="Search & replace in environment values", - ) - - limit_groups: list[LimitGroupsSubmodel] = SettingsField( - default_factory=list, - title="Limit Groups", - ) - - @validator( - "limit_groups", - "env_search_replace_values") - def validate_unique_names(cls, value): - ensure_unique_names(value) - return value class HarmonySubmitDeadlineModel(BaseSettingsModel): @@ -199,11 +227,6 @@ class HarmonySubmitDeadlineModel(BaseSettingsModel): enabled: bool = SettingsField(title="Enabled") optional: bool = SettingsField(title="Optional") active: bool = SettingsField(title="Active") - use_published: bool = SettingsField(title="Use Published scene") - priority: int = SettingsField(title="Priority") - chunk_size: int = SettingsField(title="Chunk Size") - group: str = SettingsField(title="Group") - department: str = SettingsField(title="Department") class HoudiniSubmitDeadlineModel(BaseSettingsModel): @@ -212,25 +235,6 @@ class HoudiniSubmitDeadlineModel(BaseSettingsModel): optional: bool = SettingsField(title="Optional") active: bool = SettingsField(title="Active") - priority: int = SettingsField(title="Priority") - chunk_size: int = SettingsField(title="Chunk Size") - group: str = SettingsField(title="Group") - limits: str = SettingsField( - title="Limit Groups", - description=( - "Enter a comma separated list of limits.\n" - "Specifies the limit groups that this job is a member of (default = blank)." - ) - ) - machine_limit: int = SettingsField( - title="Machine Limit", - description=( - "Specifies the maximum number of machines this job can be" - " rendered on at the same time (default = 0, which means" - " unlimited)." - ) - ) - export_priority: int = SettingsField(title="Export Priority") export_chunk_size: int = SettingsField(title="Export Chunk Size") export_group: str = SettingsField(title="Export Group") @@ -257,25 +261,6 @@ class HoudiniCacheSubmitDeadlineModel(BaseSettingsModel): optional: bool = SettingsField(title="Optional") active: bool = SettingsField(title="Active") - priority: int = SettingsField(title="Priority") - chunk_size: int = SettingsField(title="Chunk Size") - group: str = SettingsField(title="Group") - limits: str = SettingsField( - title="Limit Groups", - description=( - "Enter a comma separated list of limits.\n" - "Specifies the limit groups that this job is a member of (default = blank)." - ) - ) - machine_limit: int = SettingsField( - title="Machine Limit", - description=( - "Specifies the maximum number of machines this job can be" - " rendered on at the same time (default = 0, which means" - " unlimited)." 
- ) - ) - class AfterEffectsSubmitDeadlineModel(BaseSettingsModel): """After Effects deadline submitter settings.""" @@ -283,12 +268,6 @@ class AfterEffectsSubmitDeadlineModel(BaseSettingsModel): enabled: bool = SettingsField(title="Enabled") optional: bool = SettingsField(title="Optional") active: bool = SettingsField(title="Active") - use_published: bool = SettingsField(title="Use Published scene") - priority: int = SettingsField(title="Priority") - chunk_size: int = SettingsField(title="Chunk Size") - group: str = SettingsField(title="Group") - department: str = SettingsField(title="Department") - multiprocess: bool = SettingsField(title="Optional") class CelactionSubmitDeadlineModel(BaseSettingsModel): @@ -310,14 +289,6 @@ class BlenderSubmitDeadlineModel(BaseSettingsModel): enabled: bool = SettingsField(True) optional: bool = SettingsField(title="Optional") active: bool = SettingsField(title="Active") - use_published: bool = SettingsField(title="Use Published scene") - asset_dependencies: bool = SettingsField(title="Use Asset dependencies") - priority: int = SettingsField(title="Priority") - chunk_size: int = SettingsField(title="Frame per Task") - group: str = SettingsField("", title="Group Name") - job_delay: str = SettingsField( - "", title="Delay job (timecode dd:hh:mm:ss)" - ) class AOVFilterSubmodel(BaseSettingsModel): @@ -373,6 +344,9 @@ class PublishPluginsModel(BaseSettingsModel): CollectDeadlinePools: CollectDeadlinePoolsModel = SettingsField( default_factory=CollectDeadlinePoolsModel, title="Default Pools") + CollectJobInfo: CollectJobInfoModel = SettingsField( + default_factory=CollectJobInfoModel, + title="Collect JobInfo") ValidateExpectedFiles: ValidateExpectedFilesModel = SettingsField( default_factory=ValidateExpectedFilesModel, title="Validate Expected Files" @@ -440,23 +414,11 @@ class PublishPluginsModel(BaseSettingsModel): "enabled": True, "optional": False, "active": True, - "use_published": True, - "priority": 50, - "chunk_size": 10000, - "group": "", - "department": "", - "multiprocess": True }, "BlenderSubmitDeadline": { "enabled": True, "optional": False, "active": True, - "use_published": True, - "asset_dependencies": True, - "priority": 50, - "chunk_size": 10, - "group": "none", - "job_delay": "00:00:00:00" }, "CelactionSubmitDeadline": { "enabled": True, @@ -472,40 +434,21 @@ class PublishPluginsModel(BaseSettingsModel): "enabled": True, "optional": False, "active": True, - "priority": 50, - "chunk_size": 10, - "concurrent_tasks": 1, - "group": "" }, "HarmonySubmitDeadline": { "enabled": True, "optional": False, "active": True, - "use_published": True, - "priority": 50, - "chunk_size": 10000, - "group": "", - "department": "" }, "HoudiniCacheSubmitDeadline": { "enabled": True, "optional": False, "active": True, - "priority": 50, - "chunk_size": 999999, - "group": "", - "limits": "", - "machine_limit": 0 }, "HoudiniSubmitDeadline": { "enabled": True, "optional": False, "active": True, - "priority": 50, - "chunk_size": 1, - "group": "", - "limits": "", - "machine_limit": 0, "export_priority": 50, "export_chunk_size": 10, "export_group": "", @@ -516,24 +459,15 @@ class PublishPluginsModel(BaseSettingsModel): "enabled": True, "optional": False, "active": True, - "use_published": True, - "priority": 50, - "chunk_size": 10, - "group": "none" }, "MayaSubmitDeadline": { "enabled": True, "optional": False, "active": True, "tile_assembler_plugin": "DraftTileAssembler", - "use_published": True, "import_reference": False, - "asset_dependencies": True, 
"strict_error_checking": True, - "priority": 50, "tile_priority": 50, - "group": "none", - "limit": [], # this used to be empty dict "jobInfo": "", # this used to be empty dict @@ -544,17 +478,6 @@ class PublishPluginsModel(BaseSettingsModel): "enabled": True, "optional": False, "active": True, - "priority": 50, - "chunk_size": 10, - "concurrent_tasks": 1, - "group": "", - "department": "", - "use_gpu": True, - "workfile_dependency": True, - "use_published_workfile": True, - "env_allowed_keys": [], - "env_search_replace_values": [], - "limit_groups": [] }, "ProcessSubmittedCacheJobOnFarm": { "enabled": True, From f72e87f34cee15949f6090b68324c01f672a5dd6 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 17 Oct 2024 17:12:12 +0200 Subject: [PATCH 002/153] Matched implemented artist overrides definitions --- .../plugins/publish/global/collect_jobinfo.py | 172 ++++++++++++++++++ server/settings/publish_plugins.py | 2 +- 2 files changed, 173 insertions(+), 1 deletion(-) create mode 100644 client/ayon_deadline/plugins/publish/global/collect_jobinfo.py diff --git a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py new file mode 100644 index 0000000000..099182dec2 --- /dev/null +++ b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py @@ -0,0 +1,172 @@ +# -*- coding: utf-8 -*- +from collections import OrderedDict + +import ayon_api +import pyblish.api +from ayon_core.lib import ( + BoolDef, + NumberDef, + TextDef, + EnumDef, + is_in_tests, + UISeparatorDef +) +from ayon_core.pipeline.publish import AYONPyblishPluginMixin +from ayon_core.settings import get_project_settings +from ayon_core.lib.profiles_filtering import filter_profiles + +from ayon_deadline.lib import FARM_FAMILIES + + +class CollectJobInfo(pyblish.api.InstancePlugin, AYONPyblishPluginMixin): + """Collect variables that belong to Deadline's JobInfo. 
+ + Variables like: + - department + - priority + - chunk size + + """ + + order = pyblish.api.CollectorOrder + 0.420 + label = "Collect Deadline JobInfo" + + families = FARM_FAMILIES + + def process(self, instance): + attr_values = self.get_attr_values_from_data(instance.data) + self.log.info(attr_values) + + @classmethod + def get_attr_defs_for_instance(cls, create_context, instance): + if not cls.instance_matches_plugin_families(instance): + return [] + + if not instance["active"]: # TODO origin_data seem not right + return [] + + project_name = create_context.project_name + project_settings = get_project_settings(project_name) + + host_name = create_context.host_name + + task_name = instance["task"] + folder_path = instance["folderPath"] + folder_entity = ayon_api.get_folder_by_path(project_name,folder_path) + task_entity = ayon_api.get_task_by_name( + project_name, folder_entity["id"], task_name) + profiles = ( + project_settings["deadline"]["publish"][cls.__name__]["profiles"]) + + if not profiles: + return [] + + profile = filter_profiles( + profiles, + { + "host_names": host_name, + "task_types": task_entity["taskType"], + "task_names": task_name, + # "product_type": product_type + } + ) + overrides = set(profile["overrides"]) + if not profile or not overrides: + return [] + + defs = [] + + # should be matching to extract_jobinfo_overrides_enum + override_defs = OrderedDict({ + "chunkSize": NumberDef( + "chunkSize", + label="Frames Per Task", + default=1, + decimals=0, + minimum=1, + maximum=1000 + ), + "priority": NumberDef( + "priority", + label="Priority", + decimals=0 + ), + "department": TextDef( + "department", + label="Department", + default="", + ), + "limit_groups": TextDef( + "limit_groups", + label="Limit Groups", + default="", + placeholder="machine1,machine2" + ), + "job_delay": TextDef( + "job_delay", + label="Delay job (timecode dd:hh:mm:ss)", + default="" + ), + }) + + defs.extend([ + UISeparatorDef("options"), + ]) + + # The Arguments that can be modified by the Publisher + for key, value in override_defs.items(): + if key not in overrides: + continue + + default_value = profile[key] + value.default = default_value + defs.append(value) + + defs.append( + UISeparatorDef("sep_alembic_options_end") + ) + + return defs + + @classmethod + def register_create_context_callbacks(cls, create_context): + create_context.add_value_changed_callback(cls.on_values_changed) + + @classmethod + def on_value_change(cls, event): + for instance_change in event["changes"]: + if not cls.instance_matches_plugin_families(instance): + continue + value_changes = instance_change["changes"] + if "enabled" not in value_changes: + continue + instance = instance_change["instance"] + new_attrs = cls.get_attr_defs_for_instance( + event["create_context"], instance + ) + instance.set_publish_plugin_attr_defs(cls.__name__, new_attrs) + + +class CollectMayaJobInfo(CollectJobInfo): + hosts = [ + "maya", + ] + @classmethod + def get_attribute_defs(cls): + defs = super().get_attribute_defs() + + defs.extend([ + NumberDef( + "tile_priority", + label="Tile Assembler Priority", + decimals=0, + default=cls.tile_priorit + ), + BoolDef( + "strict_error_checking", + label="Strict Error Checking", + default=cls.strict_error_checking + ), + ]) + + return defs \ No newline at end of file diff --git a/server/settings/publish_plugins.py b/server/settings/publish_plugins.py index daf99d09f2..f5cc685e57 100644 --- a/server/settings/publish_plugins.py +++ b/server/settings/publish_plugins.py @@ -35,7 +35,7 @@ def 
extract_jobinfo_overrides_enum(): return [ {"label": "Frames per Task", "value": "chunk_size"}, {"label": "Priority", "value": "priority"}, - {"label": "Group", "value": "group"}, + {"label": "Department", "value": "department"}, {"label": "Limit groups", "value": "limit_groups"}, {"label": "Delay job (timecode dd:hh:mm:ss)", "value": "job_delay"}, {"label": "Group", "value": "group"}, From 2b294037e0224164f4a92f9b4a3b312bc86a88c4 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 17 Oct 2024 17:51:43 +0200 Subject: [PATCH 003/153] Added collected jobInfo object --- client/ayon_deadline/lib.py | 26 ++++++++++ .../plugins/publish/global/collect_jobinfo.py | 50 +++++++++++++++++-- 2 files changed, 73 insertions(+), 3 deletions(-) diff --git a/client/ayon_deadline/lib.py b/client/ayon_deadline/lib.py index 75ad830a77..2eff33971a 100644 --- a/client/ayon_deadline/lib.py +++ b/client/ayon_deadline/lib.py @@ -1,4 +1,6 @@ import os +from dataclasses import dataclass +from typing import List, Dict # describes list of product typed used for plugin filtering for farm publishing FARM_FAMILIES = [ @@ -43,3 +45,27 @@ def get_instance_job_envs(instance) -> "dict[str, str]": env = dict(sorted(env.items())) return env + + +@dataclass +class JobInfo: + priority: int + chunk_size: int + group: str + limit_groups: List[str] + concurrent_tasks: int + department: str + use_gpu: bool + job_delay: str + use_published: bool + asset_dependencies: bool + workfile_dependency: bool + multiprocess: bool + env_allowed_keys: List[str] + env_search_replace_values: Dict[str, str] + + @classmethod + def from_dict(cls, data: Dict) -> 'JobInfo': + # Filter the dictionary to only include keys that are fields in the dataclass + filtered_data = {k: v for k, v in data.items() if k in cls.__annotations__} + return cls(**filtered_data) diff --git a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py index 099182dec2..88b7a380f9 100644 --- a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py +++ b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py @@ -15,7 +15,7 @@ from ayon_core.settings import get_project_settings from ayon_core.lib.profiles_filtering import filter_profiles -from ayon_deadline.lib import FARM_FAMILIES +from ayon_deadline.lib import FARM_FAMILIES, JobInfo class CollectJobInfo(pyblish.api.InstancePlugin, AYONPyblishPluginMixin): @@ -34,8 +34,11 @@ class CollectJobInfo(pyblish.api.InstancePlugin, AYONPyblishPluginMixin): families = FARM_FAMILIES def process(self, instance): - attr_values = self.get_attr_values_from_data(instance.data) - self.log.info(attr_values) + attr_values = self._get_jobinfo_defaults(instance) + + attr_values.update(self.get_attr_values_from_data(instance.data)) + job_info = JobInfo.from_dict(attr_values) + instance.data["deadline"]["job_info"] = job_info @classmethod def get_attr_defs_for_instance(cls, create_context, instance): @@ -45,6 +48,7 @@ def get_attr_defs_for_instance(cls, create_context, instance): if not instance["active"]: # TODO origin_data seem not right return [] + # will be reworked when CreateContext contains settings and task types project_name = create_context.project_name project_settings = get_project_settings(project_name) @@ -146,6 +150,46 @@ def on_value_change(cls, event): ) instance.set_publish_plugin_attr_defs(cls.__name__, new_attrs) + def _get_jobinfo_defaults(self, instance): + """Queries project setting for profile with default values + + Args: + instance 
(pyblish.api.Instance): Source instance. + + Returns: + (dict) + """ + attr_values = {} + + context_data = instance.context.data + host_name = context_data["hostName"] + project_settings = context_data["project_settings"] + task_entity = context_data["taskEntity"] + + task_name = task_type = "" + if task_entity: + task_name = task_entity["name"] + task_type = task_entity["taskType"] + profiles = ( + project_settings["deadline"] + ["publish"] + ["CollectJobInfo"] + ["profiles"] + ) + if profiles: + profile = filter_profiles( + profiles, + { + "host_names": host_name, + "task_types": task_type, + "task_names": task_name, + # "product_type": product_type + } + ) + if profile: + attr_values = profile + return attr_values + class CollectMayaJobInfo(CollectJobInfo): hosts = [ From cb0d24d588e1efd12d6ea8b1dfd548d86096650f Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 18 Oct 2024 11:36:18 +0200 Subject: [PATCH 004/153] Use DeadlineJobInfo as collector object It was decided that dataclasses should be used instead of attrs. This moves DeadlineJobInfo which is full mapping of JobInfo from abstract submitters to collectors to limit need of new class and necessary remapping later. --- client/ayon_deadline/lib.py | 206 ++++++++++++++++-- .../plugins/publish/global/collect_jobinfo.py | 4 +- 2 files changed, 190 insertions(+), 20 deletions(-) diff --git a/client/ayon_deadline/lib.py b/client/ayon_deadline/lib.py index 2eff33971a..ba6ffdea6d 100644 --- a/client/ayon_deadline/lib.py +++ b/client/ayon_deadline/lib.py @@ -1,6 +1,6 @@ import os -from dataclasses import dataclass -from typing import List, Dict +from dataclasses import dataclass, field +from typing import Optional, Dict, List # describes list of product typed used for plugin filtering for farm publishing FARM_FAMILIES = [ @@ -48,24 +48,194 @@ def get_instance_job_envs(instance) -> "dict[str, str]": @dataclass -class JobInfo: - priority: int - chunk_size: int - group: str - limit_groups: List[str] - concurrent_tasks: int - department: str - use_gpu: bool - job_delay: str - use_published: bool - asset_dependencies: bool - workfile_dependency: bool - multiprocess: bool - env_allowed_keys: List[str] - env_search_replace_values: Dict[str, str] +class DeadlineJobInfo: + """Mapping of all Deadline JobInfo attributes. + + This contains all JobInfo attributes plus their default values. + Those attributes set to `None` shouldn't be posted to Deadline as + the only required one is `Plugin`. 
+ """ + + # Required + Plugin: str = field(default="Untitled") + + # General + Name: str = field(default="Untitled") + Frames: Optional[int] = field(default=None) # default: 0 + Comment: Optional[str] = field(default=None) # default: empty + Department: Optional[str] = field(default=None) # default: empty + BatchName: Optional[str] = field(default=None) # default: empty + UserName: str = field(default=None) + MachineName: str = field(default=None) + Pool: Optional[str] = field(default=None) # default: "none" + SecondaryPool: Optional[str] = field(default=None) + Group: Optional[str] = field(default=None) # default: "none" + Priority: int = field(default=None) + ChunkSize: int = field(default=None) + ConcurrentTasks: int = field(default=None) + LimitConcurrentTasksToNumberOfCpus: Optional[bool] = field( + default=None) # default: "true" + OnJobComplete: str = field(default=None) + SynchronizeAllAuxiliaryFiles: Optional[bool] = field( + default=None) # default: false + ForceReloadPlugin: Optional[bool] = field(default=None) # default: false + Sequential: Optional[bool] = field(default=None) # default: false + SuppressEvents: Optional[bool] = field(default=None) # default: false + Protected: Optional[bool] = field(default=None) # default: false + InitialStatus: str = field(default="Active") + NetworkRoot: Optional[str] = field(default=None) + + # Timeouts + MinRenderTimeSeconds: Optional[int] = field(default=None) # Default: 0 + MinRenderTimeMinutes: Optional[int] = field(default=None) # Default: 0 + TaskTimeoutSeconds: Optional[int] = field(default=None) # Default: 0 + TaskTimeoutMinutes: Optional[int] = field(default=None) # Default: 0 + StartJobTimeoutSeconds: Optional[int] = field(default=None) # Default: 0 + StartJobTimeoutMinutes: Optional[int] = field(default=None) # Default: 0 + InitializePluginTimeoutSeconds: Optional[int] = field( + default=None) # Default: 0 + OnTaskTimeout: Optional[str] = field(default=None) # Default: Error + EnableTimeoutsForScriptTasks: Optional[bool] = field( + default=None) # Default: false + EnableFrameTimeouts: Optional[bool] = field(default=None) # Default: false + EnableAutoTimeout: Optional[bool] = field(default=None) # Default: false + + # Interruptible + Interruptible: Optional[bool] = field(default=None) # Default: false + InterruptiblePercentage: Optional[int] = field(default=None) + RemTimeThreshold: Optional[int] = field(default=None) + + # Notifications + NotificationTargets: Optional[str] = field( + default=None) # Default: blank (comma-separated list of users) + ClearNotificationTargets: Optional[bool] = field( + default=None) # Default: false + NotificationEmails: Optional[str] = field( + default=None) # Default: blank (comma-separated list of email addresses) + OverrideNotificationMethod: Optional[bool] = field( + default=None) # Default: false + EmailNotification: Optional[bool] = field(default=None) # Default: false + PopupNotification: Optional[bool] = field(default=None) # Default: false + NotificationNote: Optional[str] = field(default=None) # Default: blank + + # Machine Limit + MachineLimit: Optional[int] = field(default=None) # Default: 0 + MachineLimitProgress: Optional[float] = field(default=None) # Default -1.0 + Whitelist: Optional[str] = field( + default=None) # Default blank (comma-separated list) + Blacklist: Optional[str] = field( + default=None) # Default blank (comma-separated list) + + # Limits + LimitGroups: Optional[str] = field(default=None) # Default: blank + + # Dependencies + JobDependencies: Optional[str] = 
field(default=None) # Default: blank + JobDependencyPercentage: Optional[int] = field(default=None) # Default: -1 + IsFrameDependent: Optional[bool] = field(default=None) # Default: false + FrameDependencyOffsetStart: Optional[int] = field(default=None) # Default: 0 + FrameDependencyOffsetEnd: Optional[int] = field(default=None) # Default: 0 + ResumeOnCompleteDependencies: Optional[bool] = field( + default=True) # Default: true + ResumeOnDeletedDependencies: Optional[bool] = field( + default=False) # Default: false + ResumeOnFailedDependencies: Optional[bool] = field( + default=False) # Default: false + RequiredAssets: Optional[str] = field( + default=None) # Default: blank (comma-separated list) + ScriptDependencies: Optional[str] = field( + default=None) # Default: blank (comma-separated list) + + # Failure Detection + OverrideJobFailureDetection: Optional[bool] = field( + default=False) # Default: false + FailureDetectionJobErrors: Optional[int] = field(default=None) # 0..x + OverrideTaskFailureDetection: Optional[bool] = field( + default=False) # Default: false + FailureDetectionTaskErrors: Optional[int] = field(default=None) # 0..x + IgnoreBadJobDetection: Optional[bool] = field( + default=False) # Default: false + SendJobErrorWarning: Optional[bool] = field( + default=False) # Default: false + + # Cleanup + DeleteOnComplete: Optional[bool] = field(default=False) # Default: false + ArchiveOnComplete: Optional[bool] = field(default=False) # Default: false + OverrideAutoJobCleanup: Optional[bool] = field( + default=False) # Default: false + OverrideJobCleanup: Optional[bool] = field(default=None) + JobCleanupDays: Optional[int] = field( + default=None) # Default: false (not clear) + OverrideJobCleanupType: Optional[str] = field(default=None) + + # Scheduling + ScheduledType: Optional[str] = field( + default=None) # Default: None () + ScheduledStartDateTime: Optional[str] = field( + default=None) #
+ ScheduledDays: Optional[int] = field(default=1) # Default: 1 + JobDelay: Optional[str] = field(default=None) # + Scheduled: Optional[str] = field( + default=None) # Time= + + # Scripts + PreJobScript: Optional[str] = field(default=None) # Default: blank + PostJobScript: Optional[str] = field(default=None) # Default: blank + PreTaskScript: Optional[str] = field(default=None) # Default: blank + PostTaskScript: Optional[str] = field(default=None) # Default: blank + + # Event Opt-Ins + EventOptIns: Optional[str] = field( + default=None) # Default blank (comma-separated list) + + # Environment + EnvironmentKeyValue: str = field(default_factory=lambda: "EnvironmentKeyValue") + IncludeEnvironment: Optional[bool] = field(default=False) # Default: false + UseJobEnvironmentOnly: Optional[bool] = field( + default=False) # Default: false + CustomPluginDirectory: Optional[str] = field(default=None) # Default blank + + # Job Extra Info + ExtraInfoKeyValue: str = field(default_factory=lambda: "ExtraInfoKeyValue") + + OverrideTaskExtraInfoNames: Optional[bool] = field( + default=False) # Default false + + TaskExtraInfoName: str = field(default_factory=lambda: "TaskExtraInfoName") + + OutputFilename: str = field(default_factory=lambda: "OutputFilename") + OutputFilenameTile: str = field(default_factory=lambda: "OutputFilename{}Tile") + OutputDirectory: str = field(default_factory=lambda: "OutputDirectory") + + AssetDependency: str = field(default_factory=lambda: "AssetDependency") + + TileJob: bool = field(default=False) + TileJobFrame: int = field(default=0) + TileJobTilesInX: int = field(default=0) + TileJobTilesInY: int = field(default=0) + TileJobTileCount: int = field(default=0) + + MaintenanceJob: bool = field(default=False) + MaintenanceJobStartFrame: int = field(default=0) + MaintenanceJobEndFrame: int = field(default=0) + @classmethod def from_dict(cls, data: Dict) -> 'JobInfo': + + def capitalize(key): + words = key.split("_") + return "".join(word.capitalize() for word in words) + # Filter the dictionary to only include keys that are fields in the dataclass - filtered_data = {k: v for k, v in data.items() if k in cls.__annotations__} + capitalized = {capitalize(k): v for k, v in data.items()} + filtered_data = {k: v for k, v + in capitalized.items() + if k in cls.__annotations__} return cls(**filtered_data) + + + +arr = {"priority": 40} +job = DeadlineJobInfo.from_dict(arr) +print(job.Priority) \ No newline at end of file diff --git a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py index 88b7a380f9..1070baed4e 100644 --- a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py +++ b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py @@ -15,7 +15,7 @@ from ayon_core.settings import get_project_settings from ayon_core.lib.profiles_filtering import filter_profiles -from ayon_deadline.lib import FARM_FAMILIES, JobInfo +from ayon_deadline.lib import FARM_FAMILIES, DeadlineJobInfo class CollectJobInfo(pyblish.api.InstancePlugin, AYONPyblishPluginMixin): @@ -37,7 +37,7 @@ def process(self, instance): attr_values = self._get_jobinfo_defaults(instance) attr_values.update(self.get_attr_values_from_data(instance.data)) - job_info = JobInfo.from_dict(attr_values) + job_info = DeadlineJobInfo.from_dict(attr_values) instance.data["deadline"]["job_info"] = job_info @classmethod From 37c0a0d6e80042766de72f5c257e3daffdfe0562 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 18 Oct 2024 14:58:34 +0200 Subject: 
[PATCH 005/153] Removed six dependency --- .../ayon_deadline/abstract_submit_deadline.py | 350 +----------------- 1 file changed, 6 insertions(+), 344 deletions(-) diff --git a/client/ayon_deadline/abstract_submit_deadline.py b/client/ayon_deadline/abstract_submit_deadline.py index a02a0ce86e..9232efa2f9 100644 --- a/client/ayon_deadline/abstract_submit_deadline.py +++ b/client/ayon_deadline/abstract_submit_deadline.py @@ -11,7 +11,7 @@ from functools import partial from collections import OrderedDict -import six + import attr import requests @@ -72,349 +72,11 @@ def requests_get(*args, **kwargs): return requests.get(*args, **kwargs) -class DeadlineKeyValueVar(dict): - """ - - Serializes dictionary key values as "{key}={value}" like Deadline uses - for EnvironmentKeyValue. - - As an example: - EnvironmentKeyValue0="A_KEY=VALUE_A" - EnvironmentKeyValue1="OTHER_KEY=VALUE_B" - - The keys are serialized in alphabetical order (sorted). - - Example: - >>> var = DeadlineKeyValueVar("EnvironmentKeyValue") - >>> var["my_var"] = "hello" - >>> var["my_other_var"] = "hello2" - >>> var.serialize() - - - """ - def __init__(self, key): - super(DeadlineKeyValueVar, self).__init__() - self.__key = key - - def serialize(self): - key = self.__key - - # Allow custom location for index in serialized string - if "{}" not in key: - key = key + "{}" - - return { - key.format(index): "{}={}".format(var_key, var_value) - for index, (var_key, var_value) in enumerate(sorted(self.items())) - } - - -class DeadlineIndexedVar(dict): - """ - - Allows to set and query values by integer indices: - Query: var[1] or var.get(1) - Set: var[1] = "my_value" - Append: var += "value" - - Note: Iterating the instance is not guarantueed to be the order of the - indices. To do so iterate with `sorted()` - - """ - def __init__(self, key): - super(DeadlineIndexedVar, self).__init__() - self.__key = key - - def serialize(self): - key = self.__key - - # Allow custom location for index in serialized string - if "{}" not in key: - key = key + "{}" - - return { - key.format(index): value for index, value in sorted(self.items()) - } - - def next_available_index(self): - # Add as first unused entry - i = 0 - while i in self.keys(): - i += 1 - return i - - def update(self, data): - # Force the integer key check - for key, value in data.items(): - self.__setitem__(key, value) - - def __iadd__(self, other): - index = self.next_available_index() - self[index] = other - return self - - def __setitem__(self, key, value): - if not isinstance(key, int): - raise TypeError("Key must be an integer: {}".format(key)) - - if key < 0: - raise ValueError("Negative index can't be set: {}".format(key)) - dict.__setitem__(self, key, value) - - -@attr.s -class DeadlineJobInfo(object): - """Mapping of all Deadline *JobInfo* attributes. - - This contains all JobInfo attributes plus their default values. - Those attributes set to `None` shouldn't be posted to Deadline as - the only required one is `Plugin`. Their default values used by Deadline - are stated in - comments. 
- - ..seealso: - https://docs.thinkboxsoftware.com/products/deadline/10.1/1_User%20Manual/manual/manual-submission.html - - """ - - # Required - # ---------------------------------------------- - Plugin = attr.ib() - - # General - Frames = attr.ib(default=None) # default: 0 - Name = attr.ib(default="Untitled") - Comment = attr.ib(default=None) # default: empty - Department = attr.ib(default=None) # default: empty - BatchName = attr.ib(default=None) # default: empty - UserName = attr.ib(default=getpass.getuser()) - MachineName = attr.ib(default=platform.node()) - Pool = attr.ib(default=None) # default: "none" - SecondaryPool = attr.ib(default=None) - Group = attr.ib(default=None) # default: "none" - Priority = attr.ib(default=50) - ChunkSize = attr.ib(default=1) - ConcurrentTasks = attr.ib(default=1) - LimitConcurrentTasksToNumberOfCpus = attr.ib( - default=None) # default: "true" - OnJobComplete = attr.ib(default="Nothing") - SynchronizeAllAuxiliaryFiles = attr.ib(default=None) # default: false - ForceReloadPlugin = attr.ib(default=None) # default: false - Sequential = attr.ib(default=None) # default: false - SuppressEvents = attr.ib(default=None) # default: false - Protected = attr.ib(default=None) # default: false - InitialStatus = attr.ib(default="Active") - NetworkRoot = attr.ib(default=None) - - # Timeouts - # ---------------------------------------------- - MinRenderTimeSeconds = attr.ib(default=None) # Default: 0 - MinRenderTimeMinutes = attr.ib(default=None) # Default: 0 - TaskTimeoutSeconds = attr.ib(default=None) # Default: 0 - TaskTimeoutMinutes = attr.ib(default=None) # Default: 0 - StartJobTimeoutSeconds = attr.ib(default=None) # Default: 0 - StartJobTimeoutMinutes = attr.ib(default=None) # Default: 0 - InitializePluginTimeoutSeconds = attr.ib(default=None) # Default: 0 - # can be one of - OnTaskTimeout = attr.ib(default=None) # Default: Error - EnableTimeoutsForScriptTasks = attr.ib(default=None) # Default: false - EnableFrameTimeouts = attr.ib(default=None) # Default: false - EnableAutoTimeout = attr.ib(default=None) # Default: false - - # Interruptible - # ---------------------------------------------- - Interruptible = attr.ib(default=None) # Default: false - InterruptiblePercentage = attr.ib(default=None) - RemTimeThreshold = attr.ib(default=None) - - # Notifications - # ---------------------------------------------- - # can be comma separated list of users - NotificationTargets = attr.ib(default=None) # Default: blank - ClearNotificationTargets = attr.ib(default=None) # Default: false - # A comma separated list of additional email addresses - NotificationEmails = attr.ib(default=None) # Default: blank - OverrideNotificationMethod = attr.ib(default=None) # Default: false - EmailNotification = attr.ib(default=None) # Default: false - PopupNotification = attr.ib(default=None) # Default: false - # String with `[EOL]` used for end of line - NotificationNote = attr.ib(default=None) # Default: blank - - # Machine Limit - # ---------------------------------------------- - MachineLimit = attr.ib(default=None) # Default: 0 - MachineLimitProgress = attr.ib(default=None) # Default: -1.0 - Whitelist = attr.ib(default=None) # Default: blank - Blacklist = attr.ib(default=None) # Default: blank - - # Limits - # ---------------------------------------------- - # comma separated list of limit groups - LimitGroups = attr.ib(default=None) # Default: blank - - # Dependencies - # ---------------------------------------------- - # comma separated list of job IDs - JobDependencies = 
attr.ib(default=None) # Default: blank - JobDependencyPercentage = attr.ib(default=None) # Default: -1 - IsFrameDependent = attr.ib(default=None) # Default: false - FrameDependencyOffsetStart = attr.ib(default=None) # Default: 0 - FrameDependencyOffsetEnd = attr.ib(default=None) # Default: 0 - ResumeOnCompleteDependencies = attr.ib(default=None) # Default: true - ResumeOnDeletedDependencies = attr.ib(default=None) # Default: false - ResumeOnFailedDependencies = attr.ib(default=None) # Default: false - # comma separated list of asset paths - RequiredAssets = attr.ib(default=None) # Default: blank - # comma separated list of script paths - ScriptDependencies = attr.ib(default=None) # Default: blank - - # Failure Detection - # ---------------------------------------------- - OverrideJobFailureDetection = attr.ib(default=None) # Default: false - FailureDetectionJobErrors = attr.ib(default=None) # 0..x - OverrideTaskFailureDetection = attr.ib(default=None) # Default: false - FailureDetectionTaskErrors = attr.ib(default=None) # 0..x - IgnoreBadJobDetection = attr.ib(default=None) # Default: false - SendJobErrorWarning = attr.ib(default=None) # Default: false - - # Cleanup - # ---------------------------------------------- - DeleteOnComplete = attr.ib(default=None) # Default: false - ArchiveOnComplete = attr.ib(default=None) # Default: false - OverrideAutoJobCleanup = attr.ib(default=None) # Default: false - OverrideJobCleanup = attr.ib(default=None) - JobCleanupDays = attr.ib(default=None) # Default: false - # - OverrideJobCleanupType = attr.ib(default=None) - - # Scheduling - # ---------------------------------------------- - # - ScheduledType = attr.ib(default=None) # Default: None - #
- ScheduledStartDateTime = attr.ib(default=None) - ScheduledDays = attr.ib(default=None) # Default: 1 - # - JobDelay = attr.ib(default=None) - # Time= - Scheduled = attr.ib(default=None) - - # Scripts - # ---------------------------------------------- - # all accept path to script - PreJobScript = attr.ib(default=None) # Default: blank - PostJobScript = attr.ib(default=None) # Default: blank - PreTaskScript = attr.ib(default=None) # Default: blank - PostTaskScript = attr.ib(default=None) # Default: blank - - # Event Opt-Ins - # ---------------------------------------------- - # comma separated list of plugins - EventOptIns = attr.ib(default=None) # Default: blank - - # Environment - # ---------------------------------------------- - EnvironmentKeyValue = attr.ib(factory=partial(DeadlineKeyValueVar, - "EnvironmentKeyValue")) - - IncludeEnvironment = attr.ib(default=None) # Default: false - UseJobEnvironmentOnly = attr.ib(default=None) # Default: false - CustomPluginDirectory = attr.ib(default=None) # Default: blank - - # Job Extra Info - # ---------------------------------------------- - ExtraInfo = attr.ib(factory=partial(DeadlineIndexedVar, "ExtraInfo")) - ExtraInfoKeyValue = attr.ib(factory=partial(DeadlineKeyValueVar, - "ExtraInfoKeyValue")) - - # Task Extra Info Names - # ---------------------------------------------- - OverrideTaskExtraInfoNames = attr.ib(default=None) # Default: false - TaskExtraInfoName = attr.ib(factory=partial(DeadlineIndexedVar, - "TaskExtraInfoName")) - - # Output - # ---------------------------------------------- - OutputFilename = attr.ib(factory=partial(DeadlineIndexedVar, - "OutputFilename")) - OutputFilenameTile = attr.ib(factory=partial(DeadlineIndexedVar, - "OutputFilename{}Tile")) - OutputDirectory = attr.ib(factory=partial(DeadlineIndexedVar, - "OutputDirectory")) - - # Asset Dependency - # ---------------------------------------------- - AssetDependency = attr.ib(factory=partial(DeadlineIndexedVar, - "AssetDependency")) - - # Tile Job - # ---------------------------------------------- - TileJob = attr.ib(default=None) # Default: false - TileJobFrame = attr.ib(default=None) # Default: 0 - TileJobTilesInX = attr.ib(default=None) # Default: 0 - TileJobTilesInY = attr.ib(default=None) # Default: 0 - TileJobTileCount = attr.ib(default=None) # Default: 0 - - # Maintenance Job - # ---------------------------------------------- - MaintenanceJob = attr.ib(default=None) # Default: false - MaintenanceJobStartFrame = attr.ib(default=None) # Default: 0 - MaintenanceJobEndFrame = attr.ib(default=None) # Default: 0 - - def serialize(self): - """Return all data serialized as dictionary. - - Returns: - OrderedDict: all serialized data. 
- - """ - def filter_data(a, v): - if isinstance(v, (DeadlineIndexedVar, DeadlineKeyValueVar)): - return False - if v is None: - return False - return True - - serialized = attr.asdict( - self, dict_factory=OrderedDict, filter=filter_data) - - # Custom serialize these attributes - for attribute in [ - self.EnvironmentKeyValue, - self.ExtraInfo, - self.ExtraInfoKeyValue, - self.TaskExtraInfoName, - self.OutputFilename, - self.OutputFilenameTile, - self.OutputDirectory, - self.AssetDependency - ]: - serialized.update(attribute.serialize()) - - return serialized - - def update(self, data): - """Update instance with data dict""" - for key, value in data.items(): - setattr(self, key, value) - - def add_render_job_env_var(self): - """Add required env vars for valid render job submission.""" - for key, value in get_ayon_render_job_envs().items(): - self.EnvironmentKeyValue[key] = value - - def add_instance_job_env_vars(self, instance): - """Add all job environments as specified on the instance and context - - Any instance `job_env` vars will override the context `job_env` vars. - """ - for key, value in get_instance_job_envs(instance).items(): - self.EnvironmentKeyValue[key] = value - - -@six.add_metaclass(AbstractMetaInstancePlugin) -class AbstractSubmitDeadline(pyblish.api.InstancePlugin, - AYONPyblishPluginMixin): +class AbstractSubmitDeadline( + pyblish.api.InstancePlugin, + AYONPyblishPluginMixin, + metaclass=AbstractMetaInstancePlugin +): """Class abstracting access to Deadline.""" label = "Submit to Deadline" From 335883800d6b3fca5024173f7ac04237dcac5816 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 18 Oct 2024 15:01:04 +0200 Subject: [PATCH 006/153] Implemented new generic method applicable for all DCC Added new argument for old get_job_info (which should be probabaly renamed) to pass base of prepared object to be enhanced with DCC specific fields --- .../ayon_deadline/abstract_submit_deadline.py | 36 ++++++++++++++++--- 1 file changed, 32 insertions(+), 4 deletions(-) diff --git a/client/ayon_deadline/abstract_submit_deadline.py b/client/ayon_deadline/abstract_submit_deadline.py index 9232efa2f9..e84ee20509 100644 --- a/client/ayon_deadline/abstract_submit_deadline.py +++ b/client/ayon_deadline/abstract_submit_deadline.py @@ -12,7 +12,6 @@ from collections import OrderedDict -import attr import requests import pyblish.api @@ -24,8 +23,7 @@ from ayon_core.pipeline.publish.lib import ( replace_with_published_scene_path ) - -from .lib import get_ayon_render_job_envs, get_instance_job_envs +from ayon_core.lib import is_in_tests JSONDecodeError = getattr(json.decoder, "JSONDecodeError", ValueError) @@ -120,7 +118,8 @@ def process(self, instance): self.scene_path = file_path self.log.info("Using {} for render/export.".format(file_path)) - self.job_info = self.get_job_info() + job_info = self.get_generic_job_info(instance) + self.job_info = self.get_job_info(job_info) self.plugin_info = self.get_plugin_info() self.aux_files = self.get_aux_files() @@ -157,6 +156,35 @@ def process_submission(self): verify = self._instance.data["deadline"]["verify"] return self.submit(payload, auth, verify) + def get_generic_job_info(self, instance): + context = instance.context + + job_info = instance.data["deadline"]["job_info"] + + # Always use the original work file name for the Job name even when + # rendering is done from the published Work File. The original work + # file name is clearer because it can also have subversion strings, + # etc. which are stripped for the published file. 
+        src_filepath = context.data["currentFile"]
+        src_filename = os.path.basename(src_filepath)
+
+        if is_in_tests():
+            src_filename += datetime.now().strftime("%d%m%Y%H%M%S")
+
+        job_info.Name = "%s - %s" % (src_filename, instance.name)
+        job_info.BatchName = src_filename
+        job_info.UserName = context.data.get("deadlineUser", getpass.getuser())  # TODO clean deadlineUser
+
+        first_expected_file = instance.data["expectedFiles"][0]
+        job_info.OutputFilename += os.path.basename(first_expected_file)
+        job_info.OutputDirectory += os.path.dirname(first_expected_file)
+
+        # Set job environment variables
+        job_info.add_instance_job_env_vars(instance)
+        job_info.add_render_job_env_var()
+
+        return job_info
+
     @abstractmethod
     def get_job_info(self):
         """Return filled Deadline JobInfo.

From 60da8b939e901a322911cff6f177183a618289ac Mon Sep 17 00:00:00 2001
From: Petr Kalis
Date: Fri, 18 Oct 2024 15:01:36 +0200
Subject: [PATCH 007/153] Removed unneeded imports

---
 client/ayon_deadline/abstract_submit_deadline.py | 5 ++---
 1 file changed, 2 insertions(+), 3 deletions(-)

diff --git a/client/ayon_deadline/abstract_submit_deadline.py b/client/ayon_deadline/abstract_submit_deadline.py
index e84ee20509..1046f469c6 100644
--- a/client/ayon_deadline/abstract_submit_deadline.py
+++ b/client/ayon_deadline/abstract_submit_deadline.py
@@ -6,10 +6,9 @@
 """
 import json.decoder
 from abc import abstractmethod
-import platform
 import getpass
-from functools import partial
-from collections import OrderedDict
+import os
+import datetime
 
 import requests
 

From ae3c73917fdff4ab2feb71b2ee1640ee78d8beb9 Mon Sep 17 00:00:00 2001
From: Petr Kalis
Date: Fri, 18 Oct 2024 15:02:35 +0200
Subject: [PATCH 008/153] Do not pass empty string to DeadlineJobInfo

Empty strings override None defaults, which can cause issues
(it definitely does for job_delay).
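To illustrate the problem, a minimal sketch (not part of this commit; it assumes
the DeadlineJobInfo dataclass introduced above and uses made-up attribute values):

    from ayon_deadline.lib import DeadlineJobInfo

    # Hypothetical values coming from the publisher attribute definitions;
    # the artist left "job_delay" empty.
    attr_values = {"priority": 70, "job_delay": ""}

    # Dropping empty strings first keeps the dataclass defaults (None) intact,
    # so unset fields are not posted to Deadline at submission time.
    attr_values = {
        key: value
        for key, value in attr_values.items()
        if value != ""
    }

    job_info = DeadlineJobInfo.from_dict(attr_values)
    assert job_info.Priority == 70
    assert job_info.JobDelay is None  # would be "" without the filtering above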
--- .../ayon_deadline/plugins/publish/global/collect_jobinfo.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py index 1070baed4e..18a073a94e 100644 --- a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py +++ b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py @@ -37,6 +37,12 @@ def process(self, instance): attr_values = self._get_jobinfo_defaults(instance) attr_values.update(self.get_attr_values_from_data(instance.data)) + # do not set empty strings + attr_values = { + key: value + for key,value in attr_values.items() + if value != "" + } job_info = DeadlineJobInfo.from_dict(attr_values) instance.data["deadline"]["job_info"] = job_info From 1c8990a1a99d2fed13e4a377b39d6931ffb8fe25 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 18 Oct 2024 15:03:06 +0200 Subject: [PATCH 009/153] Removed unneded imports --- .../publish/aftereffects/submit_aftereffects_deadline.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/client/ayon_deadline/plugins/publish/aftereffects/submit_aftereffects_deadline.py b/client/ayon_deadline/plugins/publish/aftereffects/submit_aftereffects_deadline.py index f646df720a..9973a4c37a 100644 --- a/client/ayon_deadline/plugins/publish/aftereffects/submit_aftereffects_deadline.py +++ b/client/ayon_deadline/plugins/publish/aftereffects/submit_aftereffects_deadline.py @@ -1,8 +1,6 @@ import os import attr -import getpass import pyblish.api -from datetime import datetime from ayon_core.lib import ( env_value_to_bool, @@ -10,7 +8,6 @@ is_in_tests, ) from ayon_deadline import abstract_submit_deadline -from ayon_deadline.abstract_submit_deadline import DeadlineJobInfo @attr.s From 57860cee85e3fedee08d39ba19057f37b7c5b72a Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 18 Oct 2024 15:03:39 +0200 Subject: [PATCH 010/153] Reimplemented get_job_info, moved to generic impl --- .../submit_aftereffects_deadline.py | 36 ++----------------- 1 file changed, 2 insertions(+), 34 deletions(-) diff --git a/client/ayon_deadline/plugins/publish/aftereffects/submit_aftereffects_deadline.py b/client/ayon_deadline/plugins/publish/aftereffects/submit_aftereffects_deadline.py index 9973a4c37a..b4da16a5e1 100644 --- a/client/ayon_deadline/plugins/publish/aftereffects/submit_aftereffects_deadline.py +++ b/client/ayon_deadline/plugins/publish/aftereffects/submit_aftereffects_deadline.py @@ -36,47 +36,15 @@ class AfterEffectsSubmitDeadline( targets = ["local"] settings_category = "deadline" - priority = 50 - chunk_size = 1000000 - group = None - department = None - multiprocess = True - - def get_job_info(self): - dln_job_info = DeadlineJobInfo(Plugin="AfterEffects") - - context = self._instance.context - - batch_name = os.path.basename(self._instance.data["source"]) - if is_in_tests(): - batch_name += datetime.now().strftime("%d%m%Y%H%M%S") - dln_job_info.Name = self._instance.data["name"] - dln_job_info.BatchName = batch_name + def get_job_info(self, dln_job_info): dln_job_info.Plugin = "AfterEffects" - dln_job_info.UserName = context.data.get( - "deadlineUser", getpass.getuser()) + # Deadline requires integers in frame range frame_range = "{}-{}".format( int(round(self._instance.data["frameStart"])), int(round(self._instance.data["frameEnd"]))) dln_job_info.Frames = frame_range - dln_job_info.Priority = self.priority - dln_job_info.Pool = self._instance.data.get("primaryPool") - dln_job_info.SecondaryPool = 
self._instance.data.get("secondaryPool") - dln_job_info.Group = self.group - dln_job_info.Department = self.department - dln_job_info.ChunkSize = self.chunk_size - dln_job_info.OutputFilename += \ - os.path.basename(self._instance.data["expectedFiles"][0]) - dln_job_info.OutputDirectory += \ - os.path.dirname(self._instance.data["expectedFiles"][0]) - dln_job_info.JobDelay = "00:00:00" - - # Set job environment variables - dln_job_info.add_instance_job_env_vars(self._instance) - dln_job_info.add_render_job_env_var() - return dln_job_info def get_plugin_info(self): From 10f566f19307901394eb8b6fe55272fd91164c66 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 18 Oct 2024 15:05:01 +0200 Subject: [PATCH 011/153] Removed storign deadline info to instance 'deadline' dictionary wasnt used at all, it contained large DeadlineJobInfo which just enlarged metadata json unnecessary. --- .../plugins/publish/global/submit_publish_job.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/client/ayon_deadline/plugins/publish/global/submit_publish_job.py b/client/ayon_deadline/plugins/publish/global/submit_publish_job.py index 65e4285d50..1e89d936f8 100644 --- a/client/ayon_deadline/plugins/publish/global/submit_publish_job.py +++ b/client/ayon_deadline/plugins/publish/global/submit_publish_job.py @@ -479,9 +479,11 @@ def process(self, instance): "intent": instance.context.data.get("intent"), "comment": instance.context.data.get("comment"), "job": render_job or None, - "instances": instances - } + # do not carry over unnecessary DL info with large DeadlineJobInfo + "instances": [{k: v for k, v in inst.items() if k != "deadline"} + for inst in instances] + } if deadline_publish_job_id: publish_job["deadline_publish_job_id"] = deadline_publish_job_id From bbb12b74f8452ffbe8a5d9c6660ee4abab934be4 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 18 Oct 2024 15:06:05 +0200 Subject: [PATCH 012/153] Updates to DeadlineJobInfo to match previous implementation It is handling EnvironmentKey* type of fields --- client/ayon_deadline/lib.py | 175 +++++++++++++++++++++++++++++++++--- 1 file changed, 161 insertions(+), 14 deletions(-) diff --git a/client/ayon_deadline/lib.py b/client/ayon_deadline/lib.py index ba6ffdea6d..2fcb643386 100644 --- a/client/ayon_deadline/lib.py +++ b/client/ayon_deadline/lib.py @@ -1,6 +1,8 @@ import os -from dataclasses import dataclass, field -from typing import Optional, Dict, List +from dataclasses import dataclass, field, asdict +from functools import partial +from typing import Optional, Dict, Any +import json # describes list of product typed used for plugin filtering for farm publishing FARM_FAMILIES = [ @@ -47,6 +49,100 @@ def get_instance_job_envs(instance) -> "dict[str, str]": return env +class DeadlineKeyValueVar(dict): + """ + + Serializes dictionary key values as "{key}={value}" like Deadline uses + for EnvironmentKeyValue. + + As an example: + EnvironmentKeyValue0="A_KEY=VALUE_A" + EnvironmentKeyValue1="OTHER_KEY=VALUE_B" + + The keys are serialized in alphabetical order (sorted). 
+ + Example: + >>> var = DeadlineKeyValueVar("EnvironmentKeyValue") + >>> var["my_var"] = "hello" + >>> var["my_other_var"] = "hello2" + >>> var.serialize() + + + """ + def __init__(self, key): + super(DeadlineKeyValueVar, self).__init__() + self.__key = key + + def serialize(self): + key = self.__key + + # Allow custom location for index in serialized string + if "{}" not in key: + key = key + "{}" + + return { + key.format(index): "{}={}".format(var_key, var_value) + for index, (var_key, var_value) in enumerate(sorted(self.items())) + } + +# def DeadlineKeyValueVar(key: str) -> Any: +# # Placeholder for the actual implementation +# return f"Value for {key}" + + +class DeadlineIndexedVar(dict): + """ + + Allows to set and query values by integer indices: + Query: var[1] or var.get(1) + Set: var[1] = "my_value" + Append: var += "value" + + Note: Iterating the instance is not guarantueed to be the order of the + indices. To do so iterate with `sorted()` + + """ + def __init__(self, key): + super(DeadlineIndexedVar, self).__init__() + self.__key = key + + def serialize(self): + key = self.__key + + # Allow custom location for index in serialized string + if "{}" not in key: + key = key + "{}" + + return { + key.format(index): value for index, value in sorted(self.items()) + } + + def next_available_index(self): + # Add as first unused entry + i = 0 + while i in self.keys(): + i += 1 + return i + + def update(self, data): + # Force the integer key check + for key, value in data.items(): + self.__setitem__(key, value) + + def __iadd__(self, other): + index = self.next_available_index() + self[index] = other + return self + + def __setitem__(self, key, value): + if not isinstance(key, int): + raise TypeError("Key must be an integer: {}".format(key)) + + if key < 0: + raise ValueError("Negative index can't be set: {}".format(key)) + dict.__setitem__(self, key, value) + + @dataclass class DeadlineJobInfo: """Mapping of all Deadline JobInfo attributes. 
@@ -189,25 +285,33 @@ class DeadlineJobInfo: default=None) # Default blank (comma-separated list) # Environment - EnvironmentKeyValue: str = field(default_factory=lambda: "EnvironmentKeyValue") + EnvironmentKeyValue: Any = field( + default_factory=partial(DeadlineKeyValueVar, "EnvironmentKeyValue")) IncludeEnvironment: Optional[bool] = field(default=False) # Default: false - UseJobEnvironmentOnly: Optional[bool] = field( - default=False) # Default: false + UseJobEnvironmentOnly: Optional[bool] = field(default=False) # Default: false CustomPluginDirectory: Optional[str] = field(default=None) # Default blank # Job Extra Info - ExtraInfoKeyValue: str = field(default_factory=lambda: "ExtraInfoKeyValue") + ExtraInfo: Any = field( + default_factory=partial(DeadlineIndexedVar, "ExtraInfo")) + ExtraInfoKeyValue: Any = field( + default_factory=partial(DeadlineKeyValueVar, "ExtraInfoKeyValue")) OverrideTaskExtraInfoNames: Optional[bool] = field( default=False) # Default false - TaskExtraInfoName: str = field(default_factory=lambda: "TaskExtraInfoName") + TaskExtraInfoName: Any = field( + default_factory=partial(DeadlineIndexedVar, "TaskExtraInfoName")) - OutputFilename: str = field(default_factory=lambda: "OutputFilename") - OutputFilenameTile: str = field(default_factory=lambda: "OutputFilename{}Tile") - OutputDirectory: str = field(default_factory=lambda: "OutputDirectory") + OutputFilename: Any = field( + default_factory=partial(DeadlineIndexedVar, "OutputFilename")) + OutputFilenameTile: str = field( + default_factory=partial(DeadlineIndexedVar, "OutputFilename{}Tile")) + OutputDirectory: str = field( + default_factory=partial(DeadlineIndexedVar, "OutputDirectory")) - AssetDependency: str = field(default_factory=lambda: "AssetDependency") + AssetDependency: str = field( + default_factory=partial(DeadlineIndexedVar, "AssetDependency")) TileJob: bool = field(default=False) TileJobFrame: int = field(default=0) @@ -219,6 +323,38 @@ class DeadlineJobInfo: MaintenanceJobStartFrame: int = field(default=0) MaintenanceJobEndFrame: int = field(default=0) + def serialize(self): + """Return all data serialized as dictionary. + + Returns: + OrderedDict: all serialized data. + + """ + def filter_data(a, v): + if isinstance(v, (DeadlineIndexedVar, DeadlineKeyValueVar)): + return False + if v is None: + return False + return True + + serialized = asdict(self) + serialized = {k: v for k, v in serialized.items() + if filter_data(k, v)} + + # Custom serialize these attributes + for attribute in [ + self.EnvironmentKeyValue, + self.ExtraInfo, + self.ExtraInfoKeyValue, + self.TaskExtraInfoName, + self.OutputFilename, + self.OutputFilenameTile, + self.OutputDirectory, + self.AssetDependency + ]: + serialized.update(attribute.serialize()) + + return serialized @classmethod def from_dict(cls, data: Dict) -> 'JobInfo': @@ -234,8 +370,19 @@ def capitalize(key): if k in cls.__annotations__} return cls(**filtered_data) + def add_render_job_env_var(self): + """Add required env vars for valid render job submission.""" + for key, value in get_ayon_render_job_envs().items(): + self.EnvironmentKeyValue[key] = value + + def add_instance_job_env_vars(self, instance): + """Add all job environments as specified on the instance and context + Any instance `job_env` vars will override the context `job_env` vars. 
+ """ + for key, value in get_instance_job_envs(instance).items(): + self.EnvironmentKeyValue[key] = value -arr = {"priority": 40} -job = DeadlineJobInfo.from_dict(arr) -print(job.Priority) \ No newline at end of file + def to_json(self) -> str: + """Serialize the dataclass instance to a JSON string.""" + return json.dumps(asdict(self)) \ No newline at end of file From c26c9970741e611e4d1119dbf9666bb03e626ab3 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 18 Oct 2024 15:34:16 +0200 Subject: [PATCH 013/153] Added Frames field for explicit rendering --- .../submit_aftereffects_deadline.py | 12 +++--- .../plugins/publish/global/collect_jobinfo.py | 39 +++++++++++++------ 2 files changed, 35 insertions(+), 16 deletions(-) diff --git a/client/ayon_deadline/plugins/publish/aftereffects/submit_aftereffects_deadline.py b/client/ayon_deadline/plugins/publish/aftereffects/submit_aftereffects_deadline.py index b4da16a5e1..3966a51588 100644 --- a/client/ayon_deadline/plugins/publish/aftereffects/submit_aftereffects_deadline.py +++ b/client/ayon_deadline/plugins/publish/aftereffects/submit_aftereffects_deadline.py @@ -39,11 +39,13 @@ class AfterEffectsSubmitDeadline( def get_job_info(self, dln_job_info): dln_job_info.Plugin = "AfterEffects" - # Deadline requires integers in frame range - frame_range = "{}-{}".format( - int(round(self._instance.data["frameStart"])), - int(round(self._instance.data["frameEnd"]))) - dln_job_info.Frames = frame_range + # already collected explicit values for rendered Frames + if not dln_job_info.Frames: + # Deadline requires integers in frame range + frame_range = "{}-{}".format( + int(round(self._instance.data["frameStart"])), + int(round(self._instance.data["frameEnd"]))) + dln_job_info.Frames = frame_range return dln_job_info diff --git a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py index 18a073a94e..b5e8032ea9 100644 --- a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py +++ b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py @@ -86,6 +86,31 @@ def get_attr_defs_for_instance(cls, create_context, instance): defs = [] + + defs.extend([ + UISeparatorDef("deadline_defs_starts"), + ]) + + defs.extend(cls._get_artist_overrides(overrides, profile)) + + defs.append( + TextDef( + "frames", + label="Frames", + default="", + tooltip="Explicit frames to be rendered. 
(1, 3-4)" + ) + ) + + defs.append( + UISeparatorDef("deadline_defs_end") + ) + + return defs + + @classmethod + def _get_artist_overrides(cls, overrides, profile): + """Provide list of Defs that could be filled by artist""" # should be matching to extract_jobinfo_overrides_enum override_defs = OrderedDict({ "chunkSize": NumberDef( @@ -118,11 +143,7 @@ def get_attr_defs_for_instance(cls, create_context, instance): default="" ), }) - - defs.extend([ - UISeparatorDef("options"), - ]) - + defs = [] # The Arguments that can be modified by the Publisher for key, value in override_defs.items(): if key not in overrides: @@ -132,10 +153,6 @@ def get_attr_defs_for_instance(cls, create_context, instance): value.default = default_value defs.append(value) - defs.append( - UISeparatorDef("sep_alembic_options_end") - ) - return defs @classmethod @@ -202,8 +219,8 @@ class CollectMayaJobInfo(CollectJobInfo): "maya", ] @classmethod - def get_attribute_defs(cls): - defs = super().get_attribute_defs() + def get_attr_defs_for_instance(cls, create_context, instance): + defs = super().get_attr_defs_for_instance(create_context, instance) defs.extend([ NumberDef( From e1eba7f91937f8c3d5ae6d72ba68428ed85c9ead Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 18 Oct 2024 18:15:05 +0200 Subject: [PATCH 014/153] Run CollectJobInfo only locally --- client/ayon_deadline/plugins/publish/global/collect_jobinfo.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py index b5e8032ea9..4d416a468e 100644 --- a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py +++ b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py @@ -32,6 +32,7 @@ class CollectJobInfo(pyblish.api.InstancePlugin, AYONPyblishPluginMixin): label = "Collect Deadline JobInfo" families = FARM_FAMILIES + targets = ["local"] def process(self, instance): attr_values = self._get_jobinfo_defaults(instance) @@ -93,6 +94,7 @@ def get_attr_defs_for_instance(cls, create_context, instance): defs.extend(cls._get_artist_overrides(overrides, profile)) + # explicit defs.append( TextDef( "frames", From 8062462d8239b15095348e36f237c5c2c2d72406 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 18 Oct 2024 18:19:32 +0200 Subject: [PATCH 015/153] Implemented explicit frames filtering on simple files Not yet working on AOVs. 
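
For clarity, a minimal sketch of the kind of filtering this change enables,
assuming a frames value such as "1,3-4" and frame numbers placed right before
the file extension; the helper names and the regex below are illustrative
only, not the plugin's actual implementation:

    import re

    def parse_frames(frames_str):
        """Expand a spec like "1,3-4" into the set {1, 3, 4}."""
        frames = set()
        for part in frames_str.split(","):
            part = part.strip()
            if not part:
                continue
            if "-" in part:
                start, end = part.split("-", 1)
                frames.update(range(int(start), int(end) + 1))
            else:
                frames.add(int(part))
        return frames

    def filter_expected_files(filepaths, frames_str):
        """Keep only files whose trailing frame number was requested."""
        wanted = parse_frames(frames_str)
        kept = []
        for filepath in filepaths:
            match = re.search(r"(\d+)\.\w+$", filepath)
            if match and int(match.group(1)) in wanted:
                kept.append(filepath)
        return kept

    # filter_expected_files(["render.0001.exr", "render.0002.exr"], "1")
    # -> ["render.0001.exr"]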
--- .../ayon_deadline/plugins/publish/global/submit_publish_job.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/client/ayon_deadline/plugins/publish/global/submit_publish_job.py b/client/ayon_deadline/plugins/publish/global/submit_publish_job.py index 1e89d936f8..77fa112765 100644 --- a/client/ayon_deadline/plugins/publish/global/submit_publish_job.py +++ b/client/ayon_deadline/plugins/publish/global/submit_publish_job.py @@ -398,7 +398,8 @@ def process(self, instance): self.skip_integration_repre_list, do_not_add_review, instance.context, - self + self, + instance.data["deadline"]["job_info"].Frames ) if "representations" not in instance_skeleton_data.keys(): From ec908e05ea8dea1e055d5b817acd686b8b2995bd Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 18 Oct 2024 18:27:29 +0200 Subject: [PATCH 016/153] Reworked removal of deadline in submit job Deadline portion must be there for ValidateExpectedFiles --- .../plugins/publish/global/submit_publish_job.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/client/ayon_deadline/plugins/publish/global/submit_publish_job.py b/client/ayon_deadline/plugins/publish/global/submit_publish_job.py index 77fa112765..b6e430b1fd 100644 --- a/client/ayon_deadline/plugins/publish/global/submit_publish_job.py +++ b/client/ayon_deadline/plugins/publish/global/submit_publish_job.py @@ -466,6 +466,7 @@ def process(self, instance): # Inject deadline url to instances to query DL for job id for overrides for inst in instances: + instance.data["deadline"].pop("job_info") inst["deadline"] = instance.data["deadline"] # publish job file @@ -481,8 +482,7 @@ def process(self, instance): "comment": instance.context.data.get("comment"), "job": render_job or None, # do not carry over unnecessary DL info with large DeadlineJobInfo - "instances": [{k: v for k, v in inst.items() if k != "deadline"} - for inst in instances] + "instances": instances } if deadline_publish_job_id: From 64b22ff2504607ecb1cdcf8cff18e5d96192c5c6 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 18 Oct 2024 18:30:01 +0200 Subject: [PATCH 017/153] Removed empty line --- server/settings/publish_plugins.py | 1 - 1 file changed, 1 deletion(-) diff --git a/server/settings/publish_plugins.py b/server/settings/publish_plugins.py index f5cc685e57..9e48fdc120 100644 --- a/server/settings/publish_plugins.py +++ b/server/settings/publish_plugins.py @@ -39,7 +39,6 @@ def extract_jobinfo_overrides_enum(): {"label": "Limit groups", "value": "limit_groups"}, {"label": "Delay job (timecode dd:hh:mm:ss)", "value": "job_delay"}, {"label": "Group", "value": "group"}, - ] From 0d3125dd025735e94fcc94dc41c52c8f0705f4c8 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 18 Oct 2024 18:30:43 +0200 Subject: [PATCH 018/153] Added empty line at the end --- client/ayon_deadline/plugins/publish/global/collect_jobinfo.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py index 4d416a468e..c64fa3b698 100644 --- a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py +++ b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py @@ -238,4 +238,4 @@ def get_attr_defs_for_instance(cls, create_context, instance): ), ]) - return defs \ No newline at end of file + return defs From c206b2489821ab9308df1e2094aed1996883b279 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 18 Oct 2024 18:31:27 +0200 Subject: [PATCH 019/153] 
Formatting --- client/ayon_deadline/plugins/publish/global/collect_jobinfo.py | 1 - 1 file changed, 1 deletion(-) diff --git a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py index c64fa3b698..6213cd8b95 100644 --- a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py +++ b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py @@ -87,7 +87,6 @@ def get_attr_defs_for_instance(cls, create_context, instance): defs = [] - defs.extend([ UISeparatorDef("deadline_defs_starts"), ]) From 803c25c81afcca0e6d159ca70fbf9a464f1979a1 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 18 Oct 2024 18:34:16 +0200 Subject: [PATCH 020/153] Removed forgotten dev code --- client/ayon_deadline/lib.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/client/ayon_deadline/lib.py b/client/ayon_deadline/lib.py index 2fcb643386..7384ac6538 100644 --- a/client/ayon_deadline/lib.py +++ b/client/ayon_deadline/lib.py @@ -85,10 +85,6 @@ def serialize(self): for index, (var_key, var_value) in enumerate(sorted(self.items())) } -# def DeadlineKeyValueVar(key: str) -> Any: -# # Placeholder for the actual implementation -# return f"Value for {key}" - class DeadlineIndexedVar(dict): """ From 201c5b801f5ae8e9dae2d1e7c11dfa06176340a6 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 18 Oct 2024 19:50:58 +0200 Subject: [PATCH 021/153] Refactor batchname --- client/ayon_deadline/abstract_submit_deadline.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/client/ayon_deadline/abstract_submit_deadline.py b/client/ayon_deadline/abstract_submit_deadline.py index 1046f469c6..775f7bf9e5 100644 --- a/client/ayon_deadline/abstract_submit_deadline.py +++ b/client/ayon_deadline/abstract_submit_deadline.py @@ -164,14 +164,13 @@ def get_generic_job_info(self, instance): # rendering is done from the published Work File. The original work # file name is clearer because it can also have subversion strings, # etc. which are stripped for the published file. 
- src_filepath = context.data["currentFile"] - src_filename = os.path.basename(src_filepath) + batch_name = os.path.basename(context.data["currentFile"]) if is_in_tests(): - src_filename += datetime.now().strftime("%d%m%Y%H%M%S") + batch_name += datetime.now().strftime("%d%m%Y%H%M%S") - job_info.Name = "%s - %s" % (src_filename, instance.name) - job_info.BatchName = src_filename + job_info.Name = "%s - %s" % (batch_name, instance.name) + job_info.BatchName = batch_name job_info.UserName = context.data.get("deadlineUser", getpass.getuser()) # TODO clean deadlineUser first_expected_file = instance.data["expectedFiles"][0] From 6a280b001ac9748ab50b8c94689c2740451d2ce3 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 30 Oct 2024 14:51:50 +0100 Subject: [PATCH 022/153] Added last empty line --- client/ayon_deadline/lib.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/client/ayon_deadline/lib.py b/client/ayon_deadline/lib.py index 7384ac6538..1fa782151f 100644 --- a/client/ayon_deadline/lib.py +++ b/client/ayon_deadline/lib.py @@ -381,4 +381,4 @@ def add_instance_job_env_vars(self, instance): def to_json(self) -> str: """Serialize the dataclass instance to a JSON string.""" - return json.dumps(asdict(self)) \ No newline at end of file + return json.dumps(asdict(self)) From 6d3fe14f0ca4f235d7f7747cb729584f9aeba6fa Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 30 Oct 2024 14:52:48 +0100 Subject: [PATCH 023/153] Changed collection of project_settings --- .../ayon_deadline/plugins/publish/global/collect_jobinfo.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py index 6213cd8b95..38cacdd6ba 100644 --- a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py +++ b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py @@ -57,7 +57,9 @@ def get_attr_defs_for_instance(cls, create_context, instance): # will be reworked when CreateContext contains settings and task types project_name = create_context.project_name - project_settings = get_project_settings(project_name) + project_settings = ( + create_context.get_current_project_settings() + ) host_name = create_context.host_name From f09cde72528e876731ad90bd159fc699ea7a2322 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 30 Oct 2024 14:56:20 +0100 Subject: [PATCH 024/153] Removed hiding attributes on disabled instances --- client/ayon_deadline/plugins/publish/global/collect_jobinfo.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py index 38cacdd6ba..2ef325a32b 100644 --- a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py +++ b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py @@ -52,9 +52,6 @@ def get_attr_defs_for_instance(cls, create_context, instance): if not cls.instance_matches_plugin_families(instance): return [] - if not instance["active"]: # TODO origin_data seem not right - return [] - # will be reworked when CreateContext contains settings and task types project_name = create_context.project_name project_settings = ( From 47f0f8b39e21c4d004f9e65796052844f8d30a03 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 30 Oct 2024 17:09:01 +0100 Subject: [PATCH 025/153] Replaced querying with cached methods Depends on ayon-core#967 --- .../plugins/publish/global/collect_jobinfo.py | 13 
++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py index 2ef325a32b..704beac93d 100644 --- a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py +++ b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py @@ -58,19 +58,18 @@ def get_attr_defs_for_instance(cls, create_context, instance): create_context.get_current_project_settings() ) - host_name = create_context.host_name - - task_name = instance["task"] - folder_path = instance["folderPath"] - folder_entity = ayon_api.get_folder_by_path(project_name,folder_path) - task_entity = ayon_api.get_task_by_name( - project_name, folder_entity["id"], task_name) profiles = ( project_settings["deadline"]["publish"][cls.__name__]["profiles"]) if not profiles: return [] + host_name = create_context.host_name + + task_name = instance["task"] + folder_path = instance["folderPath"] + task_entity = create_context.get_task_entity(folder_path, task_name) + profile = filter_profiles( profiles, { From 3ae7c94738fe9d09d7518c990a69728aedc0996b Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 30 Oct 2024 17:09:25 +0100 Subject: [PATCH 026/153] Fixed handling of list defaults --- .../plugins/publish/global/collect_jobinfo.py | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py index 704beac93d..4f830f5ee1 100644 --- a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py +++ b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py @@ -133,6 +133,7 @@ def _get_artist_overrides(cls, overrides, profile): "limit_groups": TextDef( "limit_groups", label="Limit Groups", + # multiline=True, TODO - some DCC might have issues with storing multi lines default="", placeholder="machine1,machine2" ), @@ -144,13 +145,16 @@ def _get_artist_overrides(cls, overrides, profile): }) defs = [] # The Arguments that can be modified by the Publisher - for key, value in override_defs.items(): + for key, definition in override_defs.items(): if key not in overrides: continue default_value = profile[key] - value.default = default_value - defs.append(value) + if (isinstance(definition, TextDef) and + isinstance(default_value, list)): + default_value = ",".join(default_value) + definition.default = default_value + defs.append(definition) return defs From 41e2f032d970a177baac3b757527397da3cb0586 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 30 Oct 2024 17:19:10 +0100 Subject: [PATCH 027/153] Refactor defs initialization --- .../plugins/publish/global/collect_jobinfo.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py index 4f830f5ee1..e1b85b2b9f 100644 --- a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py +++ b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py @@ -49,6 +49,7 @@ def process(self, instance): @classmethod def get_attr_defs_for_instance(cls, create_context, instance): + cls.log.info(create_context.get_current_task_entity()) if not cls.instance_matches_plugin_families(instance): return [] @@ -83,11 +84,9 @@ def get_attr_defs_for_instance(cls, create_context, instance): if not profile or not overrides: return [] - defs = [] - - defs.extend([ + defs = [ 
UISeparatorDef("deadline_defs_starts"), - ]) + ] defs.extend(cls._get_artist_overrides(overrides, profile)) @@ -163,7 +162,7 @@ def register_create_context_callbacks(cls, create_context): create_context.add_value_changed_callback(cls.on_values_changed) @classmethod - def on_value_change(cls, event): + def on_values_changed(cls, event): for instance_change in event["changes"]: if not cls.instance_matches_plugin_families(instance): continue From 82273766963081b76c099d2fd1d914db66f5f341 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 30 Oct 2024 17:21:30 +0100 Subject: [PATCH 028/153] Fix instance variable --- client/ayon_deadline/plugins/publish/global/collect_jobinfo.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py index e1b85b2b9f..0a2e45a10f 100644 --- a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py +++ b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py @@ -164,12 +164,13 @@ def register_create_context_callbacks(cls, create_context): @classmethod def on_values_changed(cls, event): for instance_change in event["changes"]: + instance = instance_change["instance"] if not cls.instance_matches_plugin_families(instance): continue value_changes = instance_change["changes"] if "enabled" not in value_changes: continue - instance = instance_change["instance"] + new_attrs = cls.get_attr_defs_for_instance( event["create_context"], instance ) From 24dbf6d76e0cb7c9d6f3bd31df849f8c8e336c21 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 30 Oct 2024 17:22:15 +0100 Subject: [PATCH 029/153] Formatting --- client/ayon_deadline/plugins/publish/global/collect_jobinfo.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py index 0a2e45a10f..7eb22a9f01 100644 --- a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py +++ b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py @@ -197,7 +197,8 @@ def _get_jobinfo_defaults(self, instance): task_name = task_entity["name"] task_type = task_entity["taskType"] profiles = ( - project_settings["deadline"] + project_settings + ["deadline"] ["publish"] ["CollectJobInfo"] ["profiles"] From 87d5f4b1cb070809a8ea64741e8a36415d8b2d6b Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 30 Oct 2024 17:25:51 +0100 Subject: [PATCH 030/153] Refactored base definition to array Dict was not necessary --- .../plugins/publish/global/collect_jobinfo.py | 28 +++++++++---------- 1 file changed, 14 insertions(+), 14 deletions(-) diff --git a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py index 7eb22a9f01..d9468a0f62 100644 --- a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py +++ b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py @@ -110,8 +110,8 @@ def get_attr_defs_for_instance(cls, create_context, instance): def _get_artist_overrides(cls, overrides, profile): """Provide list of Defs that could be filled by artist""" # should be matching to extract_jobinfo_overrides_enum - override_defs = OrderedDict({ - "chunkSize": NumberDef( + override_defs = [ + NumberDef( "chunkSize", label="Frames Per Task", default=1, @@ -119,41 +119,41 @@ def _get_artist_overrides(cls, overrides, profile): minimum=1, maximum=1000 ), - "priority": NumberDef( 
+ NumberDef( "priority", label="Priority", decimals=0 ), - "department": TextDef( + TextDef( "department", label="Department", default="", ), - "limit_groups": TextDef( + TextDef( "limit_groups", label="Limit Groups", # multiline=True, TODO - some DCC might have issues with storing multi lines default="", placeholder="machine1,machine2" ), - "job_delay": TextDef( + TextDef( "job_delay", label="Delay job (timecode dd:hh:mm:ss)", default="" - ), - }) + ) + ] defs = [] # The Arguments that can be modified by the Publisher - for key, definition in override_defs.items(): - if key not in overrides: + for attr_def in override_defs: + if attr_def.key not in overrides: continue - default_value = profile[key] - if (isinstance(definition, TextDef) and + default_value = profile[attr_def.key] + if (isinstance(attr_def, TextDef) and isinstance(default_value, list)): default_value = ",".join(default_value) - definition.default = default_value - defs.append(definition) + attr_def.default = default_value + defs.append(attr_def) return defs From 2fbc45bfe889037241fc80432dd197726a828312 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 30 Oct 2024 17:26:58 +0100 Subject: [PATCH 031/153] Fix default values for not existing task entity --- client/ayon_deadline/plugins/publish/global/collect_jobinfo.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py index d9468a0f62..bf0cf93ce4 100644 --- a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py +++ b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py @@ -192,7 +192,7 @@ def _get_jobinfo_defaults(self, instance): project_settings = context_data["project_settings"] task_entity = context_data["taskEntity"] - task_name = task_type = "" + task_name = task_type = None if task_entity: task_name = task_entity["name"] task_type = task_entity["taskType"] From c6416ad27a04ec7e971df84972c14ab3f408a1b2 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 30 Oct 2024 17:27:50 +0100 Subject: [PATCH 032/153] Removed unnecessary condition --- .../plugins/publish/global/collect_jobinfo.py | 24 +++++++++---------- 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py index bf0cf93ce4..21f0ae48ca 100644 --- a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py +++ b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py @@ -203,18 +203,18 @@ def _get_jobinfo_defaults(self, instance): ["CollectJobInfo"] ["profiles"] ) - if profiles: - profile = filter_profiles( - profiles, - { - "host_names": host_name, - "task_types": task_type, - "task_names": task_name, - # "product_type": product_type - } - ) - if profile: - attr_values = profile + + profile = filter_profiles( + profiles, + { + "host_names": host_name, + "task_types": task_type, + "task_names": task_name, + # "product_type": product_type + } + ) + if profile: + attr_values = profile return attr_values From e593e5d6d43b42ece78a585f6ad8388a0169b92d Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 31 Oct 2024 14:27:31 +0100 Subject: [PATCH 033/153] Separated AYON fields from Deadline own This should highlight better separation of AYON fields, that are used for some additional logic. 
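
As an illustration of the separation (a usage sketch with made-up values,
not production settings): `from_dict` converts the snake_case keys coming
from Settings/Publisher attributes into the capitalized dataclass fields,
including the new AYON-only ones:

    from ayon_deadline.lib import AYONDeadlineJobInfo

    settings_values = {
        "use_published": True,
        "priority": 50,
        "chunk_size": 10,
    }
    job_info = AYONDeadlineJobInfo.from_dict(settings_values)
    # job_info.UsePublished -> True  (AYON-only field used for extra logic)
    # job_info.Priority     -> 50    (regular Deadline JobInfo field)
    # job_info.ChunkSize    -> 10    (regular Deadline JobInfo field)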
--- client/ayon_deadline/lib.py | 34 ++++++++++++++++--- .../plugins/publish/global/collect_jobinfo.py | 10 ++---- server/settings/publish_plugins.py | 6 ++-- 3 files changed, 35 insertions(+), 15 deletions(-) diff --git a/client/ayon_deadline/lib.py b/client/ayon_deadline/lib.py index 1fa782151f..44511ca475 100644 --- a/client/ayon_deadline/lib.py +++ b/client/ayon_deadline/lib.py @@ -1,3 +1,4 @@ +import inspect import os from dataclasses import dataclass, field, asdict from functools import partial @@ -319,6 +320,16 @@ class DeadlineJobInfo: MaintenanceJobStartFrame: int = field(default=0) MaintenanceJobEndFrame: int = field(default=0) + +@dataclass +class AYONDeadlineJobInfo(DeadlineJobInfo): + """Contains additional AYON variables from Settings for internal logic.""" + + # AYON custom fields used for Settings + UsePublished: Optional[bool] = field(default=None) + UseAssetDependencies: Optional[bool] = field(default=None) + UseWorkfileDependency: Optional[bool] = field(default=None) + def serialize(self): """Return all data serialized as dictionary. @@ -334,8 +345,10 @@ def filter_data(a, v): return True serialized = asdict(self) - serialized = {k: v for k, v in serialized.items() - if filter_data(k, v)} + serialized = { + k: v for k, v in serialized.items() + if filter_data(k, v) + } # Custom serialize these attributes for attribute in [ @@ -356,14 +369,25 @@ def filter_data(a, v): def from_dict(cls, data: Dict) -> 'JobInfo': def capitalize(key): + """Transform AYON looking variables from Settings to DL looking. + + AYON uses python like variable names, eg use_published, DL JobInfo + uses capitalized, eg. UsePublished. + This method does the conversion based on this assumption. + """ words = key.split("_") return "".join(word.capitalize() for word in words) # Filter the dictionary to only include keys that are fields in the dataclass capitalized = {capitalize(k): v for k, v in data.items()} - filtered_data = {k: v for k, v - in capitalized.items() - if k in cls.__annotations__} + all_fields = set( + DeadlineJobInfo.__annotations__).union(set(cls.__annotations__) + ) + filtered_data = { + k: v for k, v + in capitalized.items() + if k in all_fields + } return cls(**filtered_data) def add_render_job_env_var(self): diff --git a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py index 21f0ae48ca..4c64030d77 100644 --- a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py +++ b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py @@ -1,7 +1,4 @@ # -*- coding: utf-8 -*- -from collections import OrderedDict - -import ayon_api import pyblish.api from ayon_core.lib import ( BoolDef, @@ -12,10 +9,9 @@ UISeparatorDef ) from ayon_core.pipeline.publish import AYONPyblishPluginMixin -from ayon_core.settings import get_project_settings from ayon_core.lib.profiles_filtering import filter_profiles -from ayon_deadline.lib import FARM_FAMILIES, DeadlineJobInfo +from ayon_deadline.lib import FARM_FAMILIES, AYONDeadlineJobInfo class CollectJobInfo(pyblish.api.InstancePlugin, AYONPyblishPluginMixin): @@ -44,7 +40,7 @@ def process(self, instance): for key,value in attr_values.items() if value != "" } - job_info = DeadlineJobInfo.from_dict(attr_values) + job_info = AYONDeadlineJobInfo.from_dict(attr_values) instance.data["deadline"]["job_info"] = job_info @classmethod @@ -231,12 +227,10 @@ def get_attr_defs_for_instance(cls, create_context, instance): "tile_priority", label="Tile Assembler Priority", decimals=0, - 
default=cls.tile_priorit ), BoolDef( "strict_error_checking", label="Strict Error Checking", - default=cls.strict_error_checking ), ]) diff --git a/server/settings/publish_plugins.py b/server/settings/publish_plugins.py index 768c3d2a45..4d70b9c92e 100644 --- a/server/settings/publish_plugins.py +++ b/server/settings/publish_plugins.py @@ -78,8 +78,10 @@ class CollectJobInfoItem(BaseSettingsModel): placeholder="dd:hh:mm:ss" ) use_published: bool = SettingsField(True, title="Use Published scene") - asset_dependencies: bool = SettingsField(True, title="Use Asset dependencies") - workfile_dependency: bool = SettingsField(True, title="Workfile Dependency") + use_asset_dependencies: bool = SettingsField( + True, title="Use Asset dependencies") + use_workfile_dependency: bool = SettingsField( + True, title="Workfile Dependency") multiprocess: bool = SettingsField(False, title="Multiprocess") env_allowed_keys: list[str] = SettingsField( From dfa544b37587f18087050f2650754717e63d7438 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 31 Oct 2024 16:46:54 +0100 Subject: [PATCH 034/153] Updates to limit_groups, machine lists --- server/settings/publish_plugins.py | 41 ++++++++++++++++++++++++++---- 1 file changed, 36 insertions(+), 5 deletions(-) diff --git a/server/settings/publish_plugins.py b/server/settings/publish_plugins.py index 4d70b9c92e..c7470da7ba 100644 --- a/server/settings/publish_plugins.py +++ b/server/settings/publish_plugins.py @@ -37,12 +37,14 @@ class CollectDeadlinePoolsModel(BaseSettingsModel): def extract_jobinfo_overrides_enum(): return [ - {"label": "Frames per Task", "value": "chunk_size"}, - {"label": "Priority", "value": "priority"}, {"label": "Department", "value": "department"}, - {"label": "Limit groups", "value": "limit_groups"}, {"label": "Delay job (timecode dd:hh:mm:ss)", "value": "job_delay"}, + {"label": "Frames per Task", "value": "chunk_size"}, {"label": "Group", "value": "group"}, + {"label": "Priority", "value": "priority"}, + {"label": "Limit groups", "value": "limit_groups"}, + {"label": "Machine List", "value": "machine_list"}, + {"label": "Machine List is a Deny", "value": "machine_list_deny"}, ] @@ -65,9 +67,33 @@ class CollectJobInfoItem(BaseSettingsModel): chunk_size: int = SettingsField(999, title="Frames per Task") priority: int = SettingsField(50, title="Priority") group: str = SettingsField("", title="Group") - limit_groups: list[LimitGroupsSubmodel] = SettingsField( + limit_groups: list[str] = SettingsField( default_factory=list, - title="Limit Groups", + title="Limit Groups" + ) + machine_limit: int = SettingsField( + 0, + title="Machine Limit", + description=( + "Specifies the maximum number of machines this job can be" + " rendered on at the same time (default = 0, which means" + " unlimited)." + ) + ) + machine_list: list[str] = SettingsField( + default_factory=list, + title="Machine List", + description=( + "List of workers where submission can/cannot run " + "based on Machine Allow/Deny toggle." + ) + ) + machine_list_deny: bool = SettingsField( + False, title="Machine List is a Deny", + description=( + "Explicitly DENY list of machines to render. Without it " + "it will ONLY ALLOW machines from list." 
+ ) ) concurrent_tasks: int = SettingsField( 1, title="Number of concurrent tasks") @@ -225,6 +251,11 @@ class NukeSubmitDeadlineModel(BaseSettingsModel): optional: bool = SettingsField(title="Optional") active: bool = SettingsField(title="Active") + node_class_limit_groups: list[LimitGroupsSubmodel] = SettingsField( + default_factory=list, + title="Node based Limit Groups", + ) + class HarmonySubmitDeadlineModel(BaseSettingsModel): """Harmony deadline submitter settings.""" From ac77178ca36d1d7def78e070ac021747f7afb972 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 31 Oct 2024 17:02:50 +0100 Subject: [PATCH 035/153] Updated machine_limit handling --- client/ayon_deadline/abstract_submit_deadline.py | 15 ++++++++++++--- client/ayon_deadline/lib.py | 4 ++++ 2 files changed, 16 insertions(+), 3 deletions(-) diff --git a/client/ayon_deadline/abstract_submit_deadline.py b/client/ayon_deadline/abstract_submit_deadline.py index 775f7bf9e5..37e7ba6f32 100644 --- a/client/ayon_deadline/abstract_submit_deadline.py +++ b/client/ayon_deadline/abstract_submit_deadline.py @@ -173,9 +173,18 @@ def get_generic_job_info(self, instance): job_info.BatchName = batch_name job_info.UserName = context.data.get("deadlineUser", getpass.getuser()) # TODO clean deadlineUser - first_expected_file = instance.data["expectedFiles"][0] - job_info.OutputFilename += os.path.basename(first_expected_file) - job_info.OutputDirectory += os.path.dirname(first_expected_file) + # Adding file dependencies. + if not is_in_tests() and job_info.UseAssetDependencies: + dependencies = instance.context.data.get("fileDependencies", []) + for dependency in dependencies: + job_info.AssetDependency += dependency + + machine_list = job_info.MachineList + if machine_list: + if job_info.MachineListDeny: + job_info.Blacklist = machine_list + else: + job_info.Whitelist = machine_list # Set job environment variables job_info.add_instance_job_env_vars(instance) diff --git a/client/ayon_deadline/lib.py b/client/ayon_deadline/lib.py index 44511ca475..d8f2ad34a1 100644 --- a/client/ayon_deadline/lib.py +++ b/client/ayon_deadline/lib.py @@ -330,6 +330,10 @@ class AYONDeadlineJobInfo(DeadlineJobInfo): UseAssetDependencies: Optional[bool] = field(default=None) UseWorkfileDependency: Optional[bool] = field(default=None) + MachineList: Optional[str] = field( + default=None) # Default blank (comma-separated list) + MachineListDeny: Optional[bool] = field(default=None) + def serialize(self): """Return all data serialized as dictionary. 
From 068ae9017eab3a59f209d5d3074a8eca1cf1c7 Mon Sep 17 00:00:00 2001
From: Petr Kalis
Date: Thu, 31 Oct 2024 17:03:18 +0100
Subject: [PATCH 036/153] Updated expected_files handling

---
 client/ayon_deadline/abstract_submit_deadline.py | 6 ++++++
 1 file changed, 6 insertions(+)

diff --git a/client/ayon_deadline/abstract_submit_deadline.py b/client/ayon_deadline/abstract_submit_deadline.py
index 37e7ba6f32..5d26bc165a 100644
--- a/client/ayon_deadline/abstract_submit_deadline.py
+++ b/client/ayon_deadline/abstract_submit_deadline.py
@@ -22,6 +22,7 @@
 from ayon_core.pipeline.publish.lib import (
     replace_with_published_scene_path
 )
+from ayon_core.pipeline.farm.tools import iter_expected_files
 from ayon_core.lib import is_in_tests

 JSONDecodeError = getattr(json.decoder, "JSONDecodeError", ValueError)
@@ -173,6 +174,11 @@ def get_generic_job_info(self, instance):
         job_info.BatchName = batch_name
         job_info.UserName = context.data.get("deadlineUser", getpass.getuser())  # TODO clean deadlineUser

+        exp = instance.data.get("expectedFiles")
+        for filepath in iter_expected_files(exp):
+            job_info.OutputDirectory += os.path.dirname(filepath)
+            job_info.OutputFilename += os.path.basename(filepath)
+
         # Adding file dependencies.

From 0e3760a141d6f1281f6459d0091f0959c3a88d97 Mon Sep 17 00:00:00 2001
From: Petr Kalis
Date: Thu, 31 Oct 2024 18:52:37 +0100
Subject: [PATCH 037/153] Implemented additional JobInfo, PluginInfo fields

These fields from Settings can be used to push in any JobInfo field that
is not explicitly implemented. The current implementation also handles
PluginInfo fields; it might be better to split that into a separate
object, but not for now.

---
 .../ayon_deadline/abstract_submit_deadline.py | 29 +++++++++++++++----
 client/ayon_deadline/lib.py                   |  3 ++
 .../submit_aftereffects_deadline.py           |  2 +-
 server/settings/publish_plugins.py            | 28 ++++++++++--------
 4 files changed, 44 insertions(+), 18 deletions(-)

diff --git a/client/ayon_deadline/abstract_submit_deadline.py b/client/ayon_deadline/abstract_submit_deadline.py
index 5d26bc165a..01b69060f6 100644
--- a/client/ayon_deadline/abstract_submit_deadline.py
+++ b/client/ayon_deadline/abstract_submit_deadline.py
@@ -123,6 +123,8 @@ def process(self, instance):
         self.plugin_info = self.get_plugin_info()
         self.aux_files = self.get_aux_files()

+        self.apply_additional_info(job_info)
+
         job_id = self.process_submission()
         self.log.info("Submitted job to Deadline: {}.".format(job_id))

@@ -130,8 +132,9 @@ def process(self, instance):
         if instance.data.get("splitRender"):
             self.log.info("Splitting export and render in two jobs")
             self.log.info("Export job id: %s", job_id)
-            render_job_info = self.get_job_info(dependency_job_ids=[job_id])
-            render_plugin_info = self.get_plugin_info(job_type="render")
+            render_job_info = self.get_job_info(
+                job_info=job_info, dependency_job_ids=[job_id])
+            render_plugin_info = self.get_plugin_info(job_type="render")
             payload = self.assemble_payload(
                 job_info=render_job_info,
                 plugin_info=render_plugin_info
@@ -198,14 +201,30 @@ def get_generic_job_info(self, instance):

         return job_info

+    def apply_additional_info(self, job_info):
+        """Adds additional fields and values which aren't explicitly impl."""
+        if job_info.AdditionalJobInfo:
+            for key, value in json.loads(job_info.AdditionalJobInfo).items():
+                setattr(self.job_info, key, value)
+
+        if job_info.AdditionalPluginInfo:
+            plugin_info = json.loads(job_info.AdditionalPluginInfo)
for key, value in plugin_info.items(): + # self.plugin_info is dict, should it be? + self.plugin_info[key] = value + @abstractmethod - def get_job_info(self): + def get_job_info(self, job_info=None, **kwargs): """Return filled Deadline JobInfo. This is host/plugin specific implementation of how to fill data in. + Args: + job_info (AYONDeadlineJobInfo): dataclass object with collected + values from Settings and Publisher UI + See: - :class:`DeadlineJobInfo` + :class:`AYONDeadlineJobInfo` Returns: :class:`DeadlineJobInfo`: Filled Deadline JobInfo. @@ -214,7 +233,7 @@ def get_job_info(self): pass @abstractmethod - def get_plugin_info(self): + def get_plugin_info(self, **kwargs): """Return filled Deadline PluginInfo. This is host/plugin specific implementation of how to fill data in. diff --git a/client/ayon_deadline/lib.py b/client/ayon_deadline/lib.py index d8f2ad34a1..669e1b5f47 100644 --- a/client/ayon_deadline/lib.py +++ b/client/ayon_deadline/lib.py @@ -334,6 +334,9 @@ class AYONDeadlineJobInfo(DeadlineJobInfo): default=None) # Default blank (comma-separated list) MachineListDeny: Optional[bool] = field(default=None) + AdditionalJobInfo: Optional[str] = field(default=None) # Default: blank + AdditionalPluginInfo: Optional[str] = field(default=None) # Default: blank + def serialize(self): """Return all data serialized as dictionary. diff --git a/client/ayon_deadline/plugins/publish/aftereffects/submit_aftereffects_deadline.py b/client/ayon_deadline/plugins/publish/aftereffects/submit_aftereffects_deadline.py index 3966a51588..bc4baffac4 100644 --- a/client/ayon_deadline/plugins/publish/aftereffects/submit_aftereffects_deadline.py +++ b/client/ayon_deadline/plugins/publish/aftereffects/submit_aftereffects_deadline.py @@ -36,7 +36,7 @@ class AfterEffectsSubmitDeadline( targets = ["local"] settings_category = "deadline" - def get_job_info(self, dln_job_info): + def get_job_info(self, dln_job_info=None): dln_job_info.Plugin = "AfterEffects" # already collected explicit values for rendered Frames diff --git a/server/settings/publish_plugins.py b/server/settings/publish_plugins.py index c7470da7ba..c383f27f54 100644 --- a/server/settings/publish_plugins.py +++ b/server/settings/publish_plugins.py @@ -120,7 +120,23 @@ class CollectJobInfoItem(BaseSettingsModel): title="Search & replace in environment values", description="Replace string values in 'Name' with value from 'Value'" ) + additional_job_info: str = SettingsField( + "", + title="Additional JobInfo data", + widget="textarea", + description= + "Dictionary (JSON parsable) to paste unto JobInfo of submission" + ) + additional_plugin_info: str = SettingsField( + "", + title="Additional PluginInfo data", + widget="textarea", + description= + "Dictionary (JSON parsable) to paste unto PluginInfo " + "of submission" + ) overrides: list[str] = SettingsField( + "", enum_resolver=extract_jobinfo_overrides_enum, title="Exposed Overrides", description=( @@ -185,14 +201,6 @@ class MayaSubmitDeadlineModel(BaseSettingsModel): title="Tile Assembler Plugin", enum_resolver=tile_assembler_enum, ) - jobInfo: str = SettingsField( - title="Additional JobInfo data", - widget="textarea", - ) - pluginInfo: str = SettingsField( - title="Additional PluginInfo data", - widget="textarea", - ) scene_patches: list[ScenePatchesSubmodel] = SettingsField( default_factory=list, @@ -516,10 +524,6 @@ class PublishPluginsModel(BaseSettingsModel): "import_reference": False, "strict_error_checking": True, "tile_priority": 50, - # this used to be empty dict - "jobInfo": "", - # 
this used to be empty dict - "pluginInfo": "", "scene_patches": [] }, "NukeSubmitDeadline": { From 26a63d0c2a3060312b6f2cd2616c4e6410047dee Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 1 Nov 2024 11:28:23 +0100 Subject: [PATCH 038/153] Refactor Co-authored-by: Roy Nieterau --- .../ayon_deadline/plugins/publish/global/collect_jobinfo.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py index 4c64030d77..8b3267171e 100644 --- a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py +++ b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py @@ -209,9 +209,7 @@ def _get_jobinfo_defaults(self, instance): # "product_type": product_type } ) - if profile: - attr_values = profile - return attr_values + return profile or {} class CollectMayaJobInfo(CollectJobInfo): From 1352ad0859d40ab4aa77e603bc88bb3440b42615 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 1 Nov 2024 11:44:19 +0100 Subject: [PATCH 039/153] Removed unneeded variable --- client/ayon_deadline/plugins/publish/global/collect_jobinfo.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py index 8b3267171e..f233557740 100644 --- a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py +++ b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py @@ -49,8 +49,6 @@ def get_attr_defs_for_instance(cls, create_context, instance): if not cls.instance_matches_plugin_families(instance): return [] - # will be reworked when CreateContext contains settings and task types - project_name = create_context.project_name project_settings = ( create_context.get_current_project_settings() ) From 1e9a34914b627fed7e0710684a130be63067cf39 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 1 Nov 2024 11:57:05 +0100 Subject: [PATCH 040/153] Refactored querying of profiles --- .../plugins/publish/global/collect_jobinfo.py | 23 ++++++++----------- 1 file changed, 9 insertions(+), 14 deletions(-) diff --git a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py index f233557740..153b8a5b86 100644 --- a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py +++ b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py @@ -30,6 +30,8 @@ class CollectJobInfo(pyblish.api.InstancePlugin, AYONPyblishPluginMixin): families = FARM_FAMILIES targets = ["local"] + profiles = [] + def process(self, instance): attr_values = self._get_jobinfo_defaults(instance) @@ -44,21 +46,14 @@ def process(self, instance): instance.data["deadline"]["job_info"] = job_info @classmethod - def get_attr_defs_for_instance(cls, create_context, instance): - cls.log.info(create_context.get_current_task_entity()) - if not cls.instance_matches_plugin_families(instance): - return [] + def apply_settings(cls, project_settings): + settings = project_settings["deadline"] + profiles = settings["publish"][cls.__name__]["profiles"] - project_settings = ( - create_context.get_current_project_settings() - ) - - profiles = ( - project_settings["deadline"]["publish"][cls.__name__]["profiles"]) - - if not profiles: - return [] + cls.profiles = profiles or [] + @classmethod + def get_attr_defs_for_instance(cls, create_context, instance): host_name = create_context.host_name task_name = instance["task"] @@ -66,7 
+61,7 @@ def get_attr_defs_for_instance(cls, create_context, instance): task_entity = create_context.get_task_entity(folder_path, task_name) profile = filter_profiles( - profiles, + cls.profiles, { "host_names": host_name, "task_types": task_entity["taskType"], From a692796d810563d3375ce51ada218090449b0a30 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 1 Nov 2024 12:38:39 +0100 Subject: [PATCH 041/153] Refactor Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> --- .../ayon_deadline/plugins/publish/global/collect_jobinfo.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py index 153b8a5b86..913b4da24f 100644 --- a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py +++ b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py @@ -69,8 +69,10 @@ def get_attr_defs_for_instance(cls, create_context, instance): # "product_type": product_type } ) + if not profile: + return [] overrides = set(profile["overrides"]) - if not profile or not overrides: + if not overrides: return [] defs = [ From 67de670619f2d8887c39ecf7a669734c4731b833 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 1 Nov 2024 12:43:13 +0100 Subject: [PATCH 042/153] Refactored task values --- .../ayon_deadline/plugins/publish/global/collect_jobinfo.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py index 153b8a5b86..cae702dbb6 100644 --- a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py +++ b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py @@ -60,11 +60,15 @@ def get_attr_defs_for_instance(cls, create_context, instance): folder_path = instance["folderPath"] task_entity = create_context.get_task_entity(folder_path, task_name) + task_name = task_type = None + if task_entity: + task_name = task_entity["name"] + task_type = task_entity["taskType"] profile = filter_profiles( cls.profiles, { "host_names": host_name, - "task_types": task_entity["taskType"], + "task_types": task_type, "task_names": task_name, # "product_type": product_type } From dbd930474b7916bd80549525ebe3cf17fc16a061 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 1 Nov 2024 14:14:25 +0100 Subject: [PATCH 043/153] Removed magic conversion from Settings to fields Fields from Settings are explicitly mapped. 
The idea is to remove AYONDeadlineJobInfo if possible and prepare DeadlineJobInfo --- .../ayon_deadline/abstract_submit_deadline.py | 24 +-- client/ayon_deadline/lib.py | 35 +--- .../plugins/publish/global/collect_jobinfo.py | 30 +++- .../publish/maya/submit_maya_deadline.py | 152 +----------------- 4 files changed, 47 insertions(+), 194 deletions(-) diff --git a/client/ayon_deadline/abstract_submit_deadline.py b/client/ayon_deadline/abstract_submit_deadline.py index 01b69060f6..565d98505d 100644 --- a/client/ayon_deadline/abstract_submit_deadline.py +++ b/client/ayon_deadline/abstract_submit_deadline.py @@ -123,7 +123,8 @@ def process(self, instance): self.plugin_info = self.get_plugin_info() self.aux_files = self.get_aux_files() - self.apply_additional_info(job_info) + plugin_info_data = instance.data["deadline"]["plugin_info_data"] + self.apply_additional_plugin_info(plugin_info_data) job_id = self.process_submission() self.log.info("Submitted job to Deadline: {}.".format(job_id)) @@ -188,30 +189,17 @@ def get_generic_job_info(self, instance): for dependency in dependencies: job_info.AssetDependency += dependency - machine_list = job_info.MachineList - if machine_list: - if job_info.MachineListDeny: - job_info.Blacklist = machine_list - else: - job_info.Whitelist = machine_list - # Set job environment variables job_info.add_instance_job_env_vars(instance) job_info.add_render_job_env_var() return job_info - def apply_additional_info(self, job_info): + def apply_additional_plugin_info(self, plugin_info_data): """Adds additional fields and values which aren't explicitly impl.""" - if job_info.AdditionalJobInfo: - for key, value in json.loads(job_info.AdditionalJobInfo).items(): - setattr(self.job_info, key, value) - - if job_info.AdditionalPluginInfo: - plugin_info = json.loads(job_info.AdditionalPluginInfo) - for key, value in plugin_info.items(): - # self.plugin_info is dict, should it be? - self.plugin_info[key] = value + for key, value in plugin_info_data.items(): + # self.plugin_info is dict, should it be? + self.plugin_info[key] = value @abstractmethod def get_job_info(self, job_info=None, **kwargs): diff --git a/client/ayon_deadline/lib.py b/client/ayon_deadline/lib.py index 669e1b5f47..e8221f50c1 100644 --- a/client/ayon_deadline/lib.py +++ b/client/ayon_deadline/lib.py @@ -330,13 +330,6 @@ class AYONDeadlineJobInfo(DeadlineJobInfo): UseAssetDependencies: Optional[bool] = field(default=None) UseWorkfileDependency: Optional[bool] = field(default=None) - MachineList: Optional[str] = field( - default=None) # Default blank (comma-separated list) - MachineListDeny: Optional[bool] = field(default=None) - - AdditionalJobInfo: Optional[str] = field(default=None) # Default: blank - AdditionalPluginInfo: Optional[str] = field(default=None) # Default: blank - def serialize(self): """Return all data serialized as dictionary. @@ -375,27 +368,15 @@ def filter_data(a, v): @classmethod def from_dict(cls, data: Dict) -> 'JobInfo': - def capitalize(key): - """Transform AYON looking variables from Settings to DL looking. - - AYON uses python like variable names, eg use_published, DL JobInfo - uses capitalized, eg. UsePublished. - This method does the conversion based on this assumption. 
- """ - words = key.split("_") - return "".join(word.capitalize() for word in words) - - # Filter the dictionary to only include keys that are fields in the dataclass - capitalized = {capitalize(k): v for k, v in data.items()} - all_fields = set( - DeadlineJobInfo.__annotations__).union(set(cls.__annotations__) - ) - filtered_data = { - k: v for k, v - in capitalized.items() - if k in all_fields + implemented_field_values = { + "ChunkSize": data["chunk_size"], + "Priority": data["priority"], + "MachineLimit": data["machine_limit"], + "ConcurrentTasks": data["concurrent_tasks"], + "Frames": data["frames"] } - return cls(**filtered_data) + + return cls(**implemented_field_values) def add_render_job_env_var(self): """Add required env vars for valid render job submission.""" diff --git a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py index 732c38248a..b299a3d330 100644 --- a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py +++ b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py @@ -1,4 +1,6 @@ # -*- coding: utf-8 -*- +import json + import pyblish.api from ayon_core.lib import ( BoolDef, @@ -11,7 +13,10 @@ from ayon_core.pipeline.publish import AYONPyblishPluginMixin from ayon_core.lib.profiles_filtering import filter_profiles -from ayon_deadline.lib import FARM_FAMILIES, AYONDeadlineJobInfo +from ayon_deadline.lib import ( + FARM_FAMILIES, + AYONDeadlineJobInfo, +) class CollectJobInfo(pyblish.api.InstancePlugin, AYONPyblishPluginMixin): @@ -43,8 +48,31 @@ def process(self, instance): if value != "" } job_info = AYONDeadlineJobInfo.from_dict(attr_values) + + self._handle_machine_list(attr_values, job_info) + + self._handle_additional_jobinfo(attr_values, job_info) + instance.data["deadline"]["job_info"] = job_info + # pass through explicitly key and values for PluginInfo + plugin_info_data = json.loads(attr_values["additional_plugin_info"]) + instance.data["deadline"]["plugin_info_data"] = plugin_info_data + + def _handle_additional_jobinfo(self,attr_values, job_info): + """Adds not explicitly implemented fields by values from Settings.""" + additional_job_info = attr_values["additional_job_info"] + for key, value in json.loads(additional_job_info).items(): + setattr(job_info, key, value) + + def _handle_machine_list(self, attr_values, job_info): + machine_list = attr_values["machine_list"] + if machine_list: + if job_info.MachineListDeny: + job_info.Blacklist = machine_list + else: + job_info.Whitelist = machine_list + @classmethod def apply_settings(cls, project_settings): settings = project_settings["deadline"] diff --git a/client/ayon_deadline/plugins/publish/maya/submit_maya_deadline.py b/client/ayon_deadline/plugins/publish/maya/submit_maya_deadline.py index b0786b25eb..be06a28114 100644 --- a/client/ayon_deadline/plugins/publish/maya/submit_maya_deadline.py +++ b/client/ayon_deadline/plugins/publish/maya/submit_maya_deadline.py @@ -18,8 +18,6 @@ from __future__ import print_function import os -import json -import getpass import copy import re import hashlib @@ -45,7 +43,7 @@ from ayon_core.pipeline.farm.tools import iter_expected_files from ayon_deadline import abstract_submit_deadline -from ayon_deadline.abstract_submit_deadline import DeadlineJobInfo +from ayon_deadline.lib import FARM_FAMILIES def _validate_deadline_bool_value(instance, attribute, value): @@ -109,64 +107,14 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline, settings_category = 
"deadline" tile_assembler_plugin = "DraftTileAssembler" - priority = 50 - tile_priority = 50 - limit = [] # limit groups - jobInfo = {} pluginInfo = {} - group = "none" - strict_error_checking = True - - @classmethod - def apply_settings(cls, project_settings): - settings = project_settings["deadline"]["publish"]["MayaSubmitDeadline"] # noqa - - # Take some defaults from settings - cls.asset_dependencies = settings.get("asset_dependencies", - cls.asset_dependencies) - cls.import_reference = settings.get("import_reference", - cls.import_reference) - cls.use_published = settings.get("use_published", cls.use_published) - cls.priority = settings.get("priority", cls.priority) - cls.tile_priority = settings.get("tile_priority", cls.tile_priority) - cls.limit = settings.get("limit", cls.limit) - cls.group = settings.get("group", cls.group) - cls.strict_error_checking = settings.get("strict_error_checking", - cls.strict_error_checking) - job_info = settings.get("jobInfo") - if job_info: - job_info = json.loads(job_info) - plugin_info = settings.get("pluginInfo") - if plugin_info: - plugin_info = json.loads(plugin_info) - - cls.jobInfo = job_info or cls.jobInfo - cls.pluginInfo = plugin_info or cls.pluginInfo - - def get_job_info(self): - job_info = DeadlineJobInfo(Plugin="MayaBatch") - - # todo: test whether this works for existing production cases - # where custom jobInfo was stored in the project settings - job_info.update(self.jobInfo) - instance = self._instance - context = instance.context + def get_job_info(self, job_info=None): + job_info.Plugin = "MayaBatch" - # Always use the original work file name for the Job name even when - # rendering is done from the published Work File. The original work - # file name is clearer because it can also have subversion strings, - # etc. which are stripped for the published file. 
- src_filepath = context.data["currentFile"] - src_filename = os.path.basename(src_filepath) - - if is_in_tests(): - src_filename += datetime.now().strftime("%d%m%Y%H%M%S") + instance = self._instance - job_info.Name = "%s - %s" % (src_filename, instance.name) - job_info.BatchName = src_filename job_info.Plugin = instance.data.get("mayaRenderPlugin", "MayaBatch") - job_info.UserName = context.data.get("deadlineUser", getpass.getuser()) # Deadline requires integers in frame range frames = "{start}-{end}x{step}".format( @@ -176,55 +124,6 @@ def get_job_info(self): ) job_info.Frames = frames - job_info.Pool = instance.data.get("primaryPool") - job_info.SecondaryPool = instance.data.get("secondaryPool") - job_info.Comment = context.data.get("comment") - job_info.Priority = instance.data.get("priority", self.priority) - - if self.group != "none" and self.group: - job_info.Group = self.group - - if self.limit: - job_info.LimitGroups = ",".join(self.limit) - - attr_values = self.get_attr_values_from_data(instance.data) - render_globals = instance.data.setdefault("renderGlobals", dict()) - machine_list = attr_values.get("machineList", "") - if machine_list: - if attr_values.get("whitelist", True): - machine_list_key = "Whitelist" - else: - machine_list_key = "Blacklist" - render_globals[machine_list_key] = machine_list - - job_info.Priority = attr_values.get("priority") - job_info.ChunkSize = attr_values.get("chunkSize") - - # Add options from RenderGlobals - render_globals = instance.data.get("renderGlobals", {}) - job_info.update(render_globals) - - # Set job environment variables - job_info.add_render_job_env_var() - job_info.add_instance_job_env_vars(self._instance) - - # to recognize render jobs - job_info.add_render_job_env_var() - job_info.EnvironmentKeyValue["AYON_LOG_NO_COLORS"] = "1" - - # Adding file dependencies. 
- if not is_in_tests() and self.asset_dependencies: - dependencies = instance.context.data["fileDependencies"] - for dependency in dependencies: - job_info.AssetDependency += dependency - - # Add list of expected files to job - # --------------------------------- - exp = instance.data.get("expectedFiles") - for filepath in iter_expected_files(exp): - job_info.OutputDirectory += os.path.dirname(filepath) - job_info.OutputFilename += os.path.basename(filepath) - return job_info def get_plugin_info(self): @@ -265,10 +164,6 @@ def get_plugin_info(self): plugin_payload = attr.asdict(plugin_info) - # Patching with pluginInfo from settings - for key, value in self.pluginInfo.items(): - plugin_payload[key] = value - return plugin_payload def process_submission(self): @@ -756,45 +651,6 @@ def _job_info_label(self, label): end=int(self._instance.data["frameEndHandle"]), ) - @classmethod - def get_attribute_defs(cls): - defs = super(MayaSubmitDeadline, cls).get_attribute_defs() - - defs.extend([ - NumberDef("priority", - label="Priority", - default=cls.default_priority, - decimals=0), - NumberDef("chunkSize", - label="Frames Per Task", - default=1, - decimals=0, - minimum=1, - maximum=1000), - TextDef("machineList", - label="Machine List", - default="", - placeholder="machine1,machine2"), - EnumDef("whitelist", - label="Machine List (Allow/Deny)", - items={ - True: "Allow List", - False: "Deny List", - }, - default=False), - NumberDef("tile_priority", - label="Tile Assembler Priority", - decimals=0, - default=cls.tile_priority), - BoolDef("strict_error_checking", - label="Strict Error Checking", - default=cls.strict_error_checking), - - ]) - - return defs - - def _format_tiles( filename, index, From 5fa10346e64fea642a945e744965469e69b6e908 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 1 Nov 2024 14:30:07 +0100 Subject: [PATCH 044/153] Implemented UsePublished Points to published workfile to be rendered (usually). 
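For review context, a minimal sketch of the intended data flow under this
change (illustrative values only, and it assumes the ayon_deadline addon is
importable; the names match the code in the diff below):

    # CollectJobInfo gathers snake_case values from Settings/Publisher
    # overrides, AYONDeadlineJobInfo.from_dict maps them to the Deadline
    # style fields, and AbstractSubmitDeadline later reads UsePublished
    # to pick between the published scene and the current workfile.
    from ayon_deadline.lib import AYONDeadlineJobInfo

    attr_values = {
        "chunk_size": 10,
        "priority": 50,
        "machine_limit": 0,
        "concurrent_tasks": 1,
        "frames": "1001-1100",
        "use_published": True,
        "use_asset_dependencies": False,
        "use_workfile_dependency": True,
    }
    job_info = AYONDeadlineJobInfo.from_dict(attr_values)
    assert job_info.UsePublished is True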
--- .../ayon_deadline/abstract_submit_deadline.py | 38 ++++++++++--------- client/ayon_deadline/lib.py | 7 +++- 2 files changed, 27 insertions(+), 18 deletions(-) diff --git a/client/ayon_deadline/abstract_submit_deadline.py b/client/ayon_deadline/abstract_submit_deadline.py index 565d98505d..74ec70bc84 100644 --- a/client/ayon_deadline/abstract_submit_deadline.py +++ b/client/ayon_deadline/abstract_submit_deadline.py @@ -81,7 +81,6 @@ class AbstractSubmitDeadline( order = pyblish.api.IntegratorOrder + 0.1 import_reference = False - use_published = True asset_dependencies = False default_priority = 50 @@ -102,27 +101,14 @@ def process(self, instance): assert self._deadline_url, "Requires Deadline Webservice URL" - file_path = None - if self.use_published: - if not self.import_reference: - file_path = self.from_published_scene() - else: - self.log.info("use the scene with imported reference for rendering") # noqa - file_path = context.data["currentFile"] - - # fallback if nothing was set - if not file_path: - self.log.warning("Falling back to workfile") - file_path = context.data["currentFile"] - - self.scene_path = file_path - self.log.info("Using {} for render/export.".format(file_path)) - job_info = self.get_generic_job_info(instance) self.job_info = self.get_job_info(job_info) self.plugin_info = self.get_plugin_info() self.aux_files = self.get_aux_files() + self._set_scene_path( + context.data["currentFile"], job_info.UsePublished) + plugin_info_data = instance.data["deadline"]["plugin_info_data"] self.apply_additional_plugin_info(plugin_info_data) @@ -145,6 +131,24 @@ def process(self, instance): render_job_id = self.submit(payload, auth, verify) self.log.info("Render job id: %s", render_job_id) + def _set_scene_path(self, current_file, use_published): + """Points which workfile should be rendered""" + file_path = None + if use_published: + if not self.import_reference: # TODO remove or implement + file_path = self.from_published_scene() + else: + self.log.info( + "use the scene with imported reference for rendering") + file_path = current_file + + # fallback if nothing was set + if not file_path: + self.log.warning("Falling back to workfile") + file_path = current_file + self.scene_path = file_path + self.log.info("Using {} for render/export.".format(file_path)) + def process_submission(self): """Process data for submission. 
diff --git a/client/ayon_deadline/lib.py b/client/ayon_deadline/lib.py index e8221f50c1..a808f6050e 100644 --- a/client/ayon_deadline/lib.py +++ b/client/ayon_deadline/lib.py @@ -373,7 +373,12 @@ def from_dict(cls, data: Dict) -> 'JobInfo': "Priority": data["priority"], "MachineLimit": data["machine_limit"], "ConcurrentTasks": data["concurrent_tasks"], - "Frames": data["frames"] + "Frames": data["frames"], + + # fields needed for logic, values unavailable during collection + "UsePublished": data["use_published"], + "UseAssetDependencies": data["use_asset_dependencies"], + "UseWorkfileDependency": data["use_workfile_dependency"] } return cls(**implemented_field_values) From 6330f0752dbcdd67aa8a4f5f5e96f560bdc30f41 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 1 Nov 2024 14:31:24 +0100 Subject: [PATCH 045/153] Removed unneeded default_priority --- client/ayon_deadline/abstract_submit_deadline.py | 1 - 1 file changed, 1 deletion(-) diff --git a/client/ayon_deadline/abstract_submit_deadline.py b/client/ayon_deadline/abstract_submit_deadline.py index 74ec70bc84..a6272f0d2f 100644 --- a/client/ayon_deadline/abstract_submit_deadline.py +++ b/client/ayon_deadline/abstract_submit_deadline.py @@ -82,7 +82,6 @@ class AbstractSubmitDeadline( import_reference = False asset_dependencies = False - default_priority = 50 def __init__(self, *args, **kwargs): super(AbstractSubmitDeadline, self).__init__(*args, **kwargs) From 7221315efa80228d3b792787e1184bfe86776d8c Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 1 Nov 2024 14:34:10 +0100 Subject: [PATCH 046/153] Removed unneeded asset_dependencies Replaced by job_info.UseAssetDependencies --- client/ayon_deadline/abstract_submit_deadline.py | 1 - .../ayon_deadline/plugins/publish/maya/submit_maya_deadline.py | 2 +- 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/client/ayon_deadline/abstract_submit_deadline.py b/client/ayon_deadline/abstract_submit_deadline.py index a6272f0d2f..762d8688f5 100644 --- a/client/ayon_deadline/abstract_submit_deadline.py +++ b/client/ayon_deadline/abstract_submit_deadline.py @@ -81,7 +81,6 @@ class AbstractSubmitDeadline( order = pyblish.api.IntegratorOrder + 0.1 import_reference = False - asset_dependencies = False def __init__(self, *args, **kwargs): super(AbstractSubmitDeadline, self).__init__(*args, **kwargs) diff --git a/client/ayon_deadline/plugins/publish/maya/submit_maya_deadline.py b/client/ayon_deadline/plugins/publish/maya/submit_maya_deadline.py index be06a28114..1ddbb88537 100644 --- a/client/ayon_deadline/plugins/publish/maya/submit_maya_deadline.py +++ b/client/ayon_deadline/plugins/publish/maya/submit_maya_deadline.py @@ -461,7 +461,7 @@ def _get_maya_payload(self, data): job_info = copy.deepcopy(self.job_info) - if not is_in_tests() and self.asset_dependencies: + if not is_in_tests() and self.job_info.UseAssetDependencies: # Asset dependency to wait for at least the scene file to sync. 
job_info.AssetDependency += self.scene_path From 436f51f168d1781d2458b4fb95ba223ee963dfae Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 1 Nov 2024 14:36:17 +0100 Subject: [PATCH 047/153] Replaced missed use_published --- .../plugins/publish/maya/submit_maya_deadline.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/client/ayon_deadline/plugins/publish/maya/submit_maya_deadline.py b/client/ayon_deadline/plugins/publish/maya/submit_maya_deadline.py index 1ddbb88537..bfa44ac047 100644 --- a/client/ayon_deadline/plugins/publish/maya/submit_maya_deadline.py +++ b/client/ayon_deadline/plugins/publish/maya/submit_maya_deadline.py @@ -4,7 +4,7 @@ This module is taking care of submitting job from Maya to Deadline. It creates job and set correct environments. Its behavior is controlled by ``DEADLINE_REST_URL`` environment variable - pointing to Deadline Web Service -and :data:`MayaSubmitDeadline.use_published` property telling Deadline to +and :data:`AYONDeadlineJobInfo.UsePublished` property telling Deadline to use published scene workfile or not. If ``vrscene`` or ``assscene`` are detected in families, it will first @@ -179,8 +179,8 @@ def process_submission(self): output_dir = os.path.dirname(first_file) instance.data["outputDir"] = output_dir - # Patch workfile (only when use_published is enabled) - if self.use_published: + # Patch workfile (only when UsePublished is enabled) + if self.job_info.UsePublished: self._patch_workfile() # Gather needed data ------------------------------------------------ From 02e0c85eb9b90580f5ddde0ee7a191d405b0ca05 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 1 Nov 2024 15:03:03 +0100 Subject: [PATCH 048/153] Updates for Maya to show attr defs --- .../plugins/publish/global/collect_jobinfo.py | 40 ++++++++----------- 1 file changed, 16 insertions(+), 24 deletions(-) diff --git a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py index b299a3d330..f79b607e69 100644 --- a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py +++ b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py @@ -41,12 +41,6 @@ def process(self, instance): attr_values = self._get_jobinfo_defaults(instance) attr_values.update(self.get_attr_values_from_data(instance.data)) - # do not set empty strings - attr_values = { - key: value - for key,value in attr_values.items() - if value != "" - } job_info = AYONDeadlineJobInfo.from_dict(attr_values) self._handle_machine_list(attr_values, job_info) @@ -123,6 +117,8 @@ def get_attr_defs_for_instance(cls, create_context, instance): ) ) + defs = cls._host_specific_attr_defs(create_context, instance, defs) + defs.append( UISeparatorDef("deadline_defs_end") ) @@ -238,25 +234,21 @@ def _get_jobinfo_defaults(self, instance): ) return profile or {} - -class CollectMayaJobInfo(CollectJobInfo): - hosts = [ - "maya", - ] @classmethod - def get_attr_defs_for_instance(cls, create_context, instance): - defs = super().get_attr_defs_for_instance(create_context, instance) + def _host_specific_attr_defs(cls, create_context, instance, defs): - defs.extend([ - NumberDef( - "tile_priority", - label="Tile Assembler Priority", - decimals=0, - ), - BoolDef( - "strict_error_checking", - label="Strict Error Checking", - ), - ]) + host_name = create_context.host_name + if host_name == "maya": + defs.extend([ + NumberDef( + "tile_priority", + label="Tile Assembler Priority", + decimals=0, + ), + BoolDef( + "strict_error_checking", + label="Strict 
Error Checking", + ), + ]) return defs From 0a22384ab44dee5b9be62091ba4adfc28da2156f Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 1 Nov 2024 15:03:31 +0100 Subject: [PATCH 049/153] Formatting to separate profile from actual presets --- server/settings/publish_plugins.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/server/settings/publish_plugins.py b/server/settings/publish_plugins.py index c383f27f54..b939004fd2 100644 --- a/server/settings/publish_plugins.py +++ b/server/settings/publish_plugins.py @@ -64,6 +64,8 @@ class CollectJobInfoItem(BaseSettingsModel): title="Task names" ) + ######################################### + chunk_size: int = SettingsField(999, title="Frames per Task") priority: int = SettingsField(50, title="Priority") group: str = SettingsField("", title="Group") From 1e71e4410ed7055520f2841d99221da18f2dba8d Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 1 Nov 2024 15:09:31 +0100 Subject: [PATCH 050/153] Moved resolution of scene_path earlier Must be prepared for PluginInfo --- client/ayon_deadline/abstract_submit_deadline.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/client/ayon_deadline/abstract_submit_deadline.py b/client/ayon_deadline/abstract_submit_deadline.py index 762d8688f5..b66eb4b8a2 100644 --- a/client/ayon_deadline/abstract_submit_deadline.py +++ b/client/ayon_deadline/abstract_submit_deadline.py @@ -101,11 +101,12 @@ def process(self, instance): job_info = self.get_generic_job_info(instance) self.job_info = self.get_job_info(job_info) - self.plugin_info = self.get_plugin_info() - self.aux_files = self.get_aux_files() self._set_scene_path( context.data["currentFile"], job_info.UsePublished) + self.plugin_info = self.get_plugin_info() + + self.aux_files = self.get_aux_files() plugin_info_data = instance.data["deadline"]["plugin_info_data"] self.apply_additional_plugin_info(plugin_info_data) From 69e94148bb9315773d323d588ff1f5e6bee9aedd Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 1 Nov 2024 15:11:42 +0100 Subject: [PATCH 051/153] Removed unneeded plugin_info class variable --- .../ayon_deadline/plugins/publish/maya/submit_maya_deadline.py | 1 - 1 file changed, 1 deletion(-) diff --git a/client/ayon_deadline/plugins/publish/maya/submit_maya_deadline.py b/client/ayon_deadline/plugins/publish/maya/submit_maya_deadline.py index bfa44ac047..4233bc8837 100644 --- a/client/ayon_deadline/plugins/publish/maya/submit_maya_deadline.py +++ b/client/ayon_deadline/plugins/publish/maya/submit_maya_deadline.py @@ -107,7 +107,6 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline, settings_category = "deadline" tile_assembler_plugin = "DraftTileAssembler" - pluginInfo = {} def get_job_info(self, job_info=None): job_info.Plugin = "MayaBatch" From 18b176493cd094a1486cfb206ae494c81d129815 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 1 Nov 2024 15:13:08 +0100 Subject: [PATCH 052/153] Removed unused import --- client/ayon_deadline/lib.py | 1 - 1 file changed, 1 deletion(-) diff --git a/client/ayon_deadline/lib.py b/client/ayon_deadline/lib.py index a808f6050e..5c109b6d4a 100644 --- a/client/ayon_deadline/lib.py +++ b/client/ayon_deadline/lib.py @@ -1,4 +1,3 @@ -import inspect import os from dataclasses import dataclass, field, asdict from functools import partial From d01479a9654d843a402f85e068c9b321b08e3da7 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 1 Nov 2024 15:17:57 +0100 Subject: [PATCH 053/153] Refactored Blender submission --- .../blender/submit_blender_deadline.py | 127 
+----------------- 1 file changed, 4 insertions(+), 123 deletions(-) diff --git a/client/ayon_deadline/plugins/publish/blender/submit_blender_deadline.py b/client/ayon_deadline/plugins/publish/blender/submit_blender_deadline.py index b155c02089..1d4c943931 100644 --- a/client/ayon_deadline/plugins/publish/blender/submit_blender_deadline.py +++ b/client/ayon_deadline/plugins/publish/blender/submit_blender_deadline.py @@ -2,21 +2,12 @@ """Submitting render job to Deadline.""" import os -import getpass import attr -from datetime import datetime - -from ayon_core.lib import ( - BoolDef, - NumberDef, - TextDef, - is_in_tests, -) + from ayon_core.pipeline.publish import AYONPyblishPluginMixin from ayon_core.pipeline.farm.tools import iter_expected_files from ayon_deadline import abstract_submit_deadline -from ayon_deadline.abstract_submit_deadline import DeadlineJobInfo @attr.s @@ -30,39 +21,12 @@ class BlenderSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline, AYONPyblishPluginMixin): label = "Submit Render to Deadline" hosts = ["blender"] - families = ["render"] + families = ["render"] # TODO this should be farm specific as render.farm settings_category = "deadline" - use_published = True - priority = 50 - chunk_size = 1 - jobInfo = {} - pluginInfo = {} - group = None - job_delay = "00:00:00:00" - - def get_job_info(self): - job_info = DeadlineJobInfo(Plugin="Blender") - - job_info.update(self.jobInfo) - + def get_job_info(self, job_info=None): instance = self._instance - context = instance.context - - # Always use the original work file name for the Job name even when - # rendering is done from the published Work File. The original work - # file name is clearer because it can also have subversion strings, - # etc. which are stripped for the published file. - src_filepath = context.data["currentFile"] - src_filename = os.path.basename(src_filepath) - - if is_in_tests(): - src_filename += datetime.now().strftime("%d%m%Y%H%M%S") - - job_info.Name = f"{src_filename} - {instance.name}" - job_info.BatchName = src_filename - instance.data.get("blenderRenderPlugin", "Blender") - job_info.UserName = context.data.get("deadlineUser", getpass.getuser()) + job_info.Plugin = instance.data.get("blenderRenderPlugin", "Blender") # Deadline requires integers in frame range frames = "{start}-{end}x{step}".format( @@ -72,49 +36,6 @@ def get_job_info(self): ) job_info.Frames = frames - job_info.Pool = instance.data.get("primaryPool") - job_info.SecondaryPool = instance.data.get("secondaryPool") - job_info.Comment = instance.data.get("comment") - - if self.group != "none" and self.group: - job_info.Group = self.group - - attr_values = self.get_attr_values_from_data(instance.data) - render_globals = instance.data.setdefault("renderGlobals", {}) - machine_list = attr_values.get("machineList", "") - if machine_list: - if attr_values.get("whitelist", True): - machine_list_key = "Whitelist" - else: - machine_list_key = "Blacklist" - render_globals[machine_list_key] = machine_list - - job_info.ChunkSize = attr_values.get("chunkSize", self.chunk_size) - job_info.Priority = attr_values.get("priority", self.priority) - job_info.ScheduledType = "Once" - job_info.JobDelay = attr_values.get("job_delay", self.job_delay) - - # Add options from RenderGlobals - render_globals = instance.data.get("renderGlobals", {}) - job_info.update(render_globals) - - # Set job environment variables - job_info.add_instance_job_env_vars(self._instance) - job_info.add_render_job_env_var() - - # Adding file dependencies. 
- if self.asset_dependencies: - dependencies = instance.context.data["fileDependencies"] - for dependency in dependencies: - job_info.AssetDependency += dependency - - # Add list of expected files to job - # --------------------------------- - exp = instance.data.get("expectedFiles") - for filepath in iter_expected_files(exp): - job_info.OutputDirectory += os.path.dirname(filepath) - job_info.OutputFilename += os.path.basename(filepath) - return job_info def get_plugin_info(self): @@ -129,10 +50,6 @@ def get_plugin_info(self): plugin_payload = attr.asdict(plugin_info) - # Patching with pluginInfo from settings - for key, value in self.pluginInfo.items(): - plugin_payload[key] = value - return plugin_payload def process_submission(self, auth=None): @@ -160,39 +77,3 @@ def from_published_scene(self): the metadata and the rendered files are in the same location. """ return super().from_published_scene(False) - - @classmethod - def get_attribute_defs(cls): - defs = super(BlenderSubmitDeadline, cls).get_attribute_defs() - defs.extend([ - BoolDef("use_published", - default=cls.use_published, - label="Use Published Scene"), - - NumberDef("priority", - minimum=1, - maximum=250, - decimals=0, - default=cls.priority, - label="Priority"), - - NumberDef("chunkSize", - minimum=1, - maximum=50, - decimals=0, - default=cls.chunk_size, - label="Frame Per Task"), - - TextDef("group", - default=cls.group, - label="Group Name"), - - TextDef("job_delay", - default=cls.job_delay, - label="Job Delay", - placeholder="dd:hh:mm:ss", - tooltip="Delay the job by the specified amount of time. " - "Timecode: dd:hh:mm:ss."), - ]) - - return defs From 2db84180afcb85372f6fb056b8195b3b7af5c1d6 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 1 Nov 2024 15:18:32 +0100 Subject: [PATCH 054/153] Refactored querying of plugin Could be dynamic value set in instance --- .../ayon_deadline/plugins/publish/maya/submit_maya_deadline.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/client/ayon_deadline/plugins/publish/maya/submit_maya_deadline.py b/client/ayon_deadline/plugins/publish/maya/submit_maya_deadline.py index 4233bc8837..be995e715c 100644 --- a/client/ayon_deadline/plugins/publish/maya/submit_maya_deadline.py +++ b/client/ayon_deadline/plugins/publish/maya/submit_maya_deadline.py @@ -109,8 +109,6 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline, tile_assembler_plugin = "DraftTileAssembler" def get_job_info(self, job_info=None): - job_info.Plugin = "MayaBatch" - instance = self._instance job_info.Plugin = instance.data.get("mayaRenderPlugin", "MayaBatch") From 3bf017f695de96b73d95ee1bb5e4d41b82a5f85b Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 1 Nov 2024 15:39:11 +0100 Subject: [PATCH 055/153] Implemented in Harmony --- .../harmony/submit_harmony_deadline.py | 33 +------------------ 1 file changed, 1 insertion(+), 32 deletions(-) diff --git a/client/ayon_deadline/plugins/publish/harmony/submit_harmony_deadline.py b/client/ayon_deadline/plugins/publish/harmony/submit_harmony_deadline.py index f5e00b4a6a..cdee302afc 100644 --- a/client/ayon_deadline/plugins/publish/harmony/submit_harmony_deadline.py +++ b/client/ayon_deadline/plugins/publish/harmony/submit_harmony_deadline.py @@ -5,14 +5,11 @@ from collections import OrderedDict from zipfile import ZipFile, is_zipfile import re -from datetime import datetime import attr import pyblish.api from ayon_deadline import abstract_submit_deadline -from ayon_deadline.abstract_submit_deadline import DeadlineJobInfo -from ayon_core.lib import 
is_in_tests class _ZipFile(ZipFile): @@ -242,40 +239,12 @@ class HarmonySubmitDeadline( targets = ["local"] settings_category = "deadline" - optional = True - use_published = False - priority = 50 - chunk_size = 1000000 - group = "none" - department = "" - - def get_job_info(self): - job_info = DeadlineJobInfo("Harmony") - job_info.Name = self._instance.data["name"] + def get_job_info(self, job_info=None): job_info.Plugin = "HarmonyAYON" job_info.Frames = "{}-{}".format( self._instance.data["frameStartHandle"], self._instance.data["frameEndHandle"] ) - # for now, get those from presets. Later on it should be - # configurable in Harmony UI directly. - job_info.Priority = self.priority - job_info.Pool = self._instance.data.get("primaryPool") - job_info.SecondaryPool = self._instance.data.get("secondaryPool") - job_info.ChunkSize = self.chunk_size - batch_name = os.path.basename(self._instance.data["source"]) - if is_in_tests(): - batch_name += datetime.now().strftime("%d%m%Y%H%M%S") - job_info.BatchName = batch_name - job_info.Department = self.department - job_info.Group = self.group - - # Set job environment variables - job_info.add_render_job_env_var() - job_info.add_instance_job_env_vars(self._instance) - - # to recognize render jobs - job_info.add_render_job_env_var() return job_info From 91b91017cb1e231f16e3467687a3f409d2e5702f Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 1 Nov 2024 17:38:47 +0100 Subject: [PATCH 056/153] Implemented in Max --- .../publish/max/submit_max_deadline.py | 133 +----------------- 1 file changed, 6 insertions(+), 127 deletions(-) diff --git a/client/ayon_deadline/plugins/publish/max/submit_max_deadline.py b/client/ayon_deadline/plugins/publish/max/submit_max_deadline.py index d18c222591..0739da0b93 100644 --- a/client/ayon_deadline/plugins/publish/max/submit_max_deadline.py +++ b/client/ayon_deadline/plugins/publish/max/submit_max_deadline.py @@ -1,13 +1,7 @@ import os -import getpass import copy import attr -from ayon_core.lib import ( - TextDef, - BoolDef, - NumberDef, -) from ayon_core.pipeline import ( AYONPyblishPluginMixin ) @@ -21,7 +15,6 @@ ) from ayon_max.api.lib_rendersettings import RenderSettings from ayon_deadline import abstract_submit_deadline -from ayon_deadline.abstract_submit_deadline import DeadlineJobInfo @attr.s @@ -41,45 +34,11 @@ class MaxSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline, targets = ["local"] settings_category = "deadline" - use_published = True - priority = 50 - chunk_size = 1 - jobInfo = {} - pluginInfo = {} - group = None - - @classmethod - def apply_settings(cls, project_settings): - settings = project_settings["deadline"]["publish"]["MaxSubmitDeadline"] # noqa - - # Take some defaults from settings - cls.use_published = settings.get("use_published", - cls.use_published) - cls.priority = settings.get("priority", - cls.priority) - cls.chuck_size = settings.get("chunk_size", cls.chunk_size) - cls.group = settings.get("group", cls.group) - # TODO: multiple camera instance, separate job infos - def get_job_info(self): - job_info = DeadlineJobInfo(Plugin="3dsmax") - - # todo: test whether this works for existing production cases - # where custom jobInfo was stored in the project settings - job_info.update(self.jobInfo) + def get_job_info(self, job_info=None): instance = self._instance - context = instance.context - # Always use the original work file name for the Job name even when - # rendering is done from the published Work File. 
The original work - # file name is clearer because it can also have subversion strings, - # etc. which are stripped for the published file. + job_info.Plugin = instance.data.get("plugin") or "3dsmax" - src_filepath = context.data["currentFile"] - src_filename = os.path.basename(src_filepath) - job_info.Name = "%s - %s" % (src_filename, instance.name) - job_info.BatchName = src_filename - job_info.Plugin = instance.data["plugin"] - job_info.UserName = context.data.get("deadlineUser", getpass.getuser()) job_info.EnableAutoTimeout = True # Deadline requires integers in frame range frames = "{start}-{end}".format( @@ -88,57 +47,10 @@ def get_job_info(self): ) job_info.Frames = frames - job_info.Pool = instance.data.get("primaryPool") - job_info.SecondaryPool = instance.data.get("secondaryPool") - - attr_values = self.get_attr_values_from_data(instance.data) - - job_info.ChunkSize = attr_values.get("chunkSize", 1) - job_info.Comment = context.data.get("comment") - job_info.Priority = attr_values.get("priority", self.priority) - job_info.Group = attr_values.get("group", self.group) - - # Add options from RenderGlobals - render_globals = instance.data.get("renderGlobals", {}) - job_info.update(render_globals) - - keys = [ - "FTRACK_API_KEY", - "FTRACK_API_USER", - "FTRACK_SERVER", - "OPENPYPE_SG_USER", - "AYON_BUNDLE_NAME", - "AYON_DEFAULT_SETTINGS_VARIANT", - "AYON_PROJECT_NAME", - "AYON_FOLDER_PATH", - "AYON_TASK_NAME", - "AYON_WORKDIR", - "AYON_APP_NAME", - "AYON_IN_TESTS", - ] - - environment = { - key: os.environ[key] - for key in keys - if key in os.environ - } - - for key in keys: - value = environment.get(key) - if not value: - continue - job_info.EnvironmentKeyValue[key] = value - - job_info.add_instance_job_env_vars(self._instance) - job_info.add_render_job_env_var() - - # Add list of expected files to job - # --------------------------------- - if not instance.data.get("multiCamera"): - exp = instance.data.get("expectedFiles") - for filepath in self._iter_expected_files(exp): - job_info.OutputDirectory += os.path.dirname(filepath) - job_info.OutputFilename += os.path.basename(filepath) + # do not add expected files for multiCamera + if instance.data.get("multiCamera"): + job_info.OutputDirectory = None + job_info.OutputFilename = None return job_info @@ -154,10 +66,6 @@ def get_plugin_info(self): plugin_payload = attr.asdict(plugin_info) - # Patching with pluginInfo from settings - for key, value in self.pluginInfo.items(): - plugin_payload[key] = value - return plugin_payload def process_submission(self): @@ -400,32 +308,3 @@ def _iter_expected_files(exp): else: for file in exp: yield file - - @classmethod - def get_attribute_defs(cls): - defs = super(MaxSubmitDeadline, cls).get_attribute_defs() - defs.extend([ - BoolDef("use_published", - default=cls.use_published, - label="Use Published Scene"), - - NumberDef("priority", - minimum=1, - maximum=250, - decimals=0, - default=cls.priority, - label="Priority"), - - NumberDef("chunkSize", - minimum=1, - maximum=50, - decimals=0, - default=cls.chunk_size, - label="Frame Per Task"), - - TextDef("group", - default=cls.group, - label="Group Name"), - ]) - - return defs From 567ed8c05e40c6acc2c21fd22f5d3e4dbb829f77 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 1 Nov 2024 17:40:08 +0100 Subject: [PATCH 057/153] Added comments and pools --- client/ayon_deadline/abstract_submit_deadline.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/client/ayon_deadline/abstract_submit_deadline.py 
b/client/ayon_deadline/abstract_submit_deadline.py index b66eb4b8a2..74915bd0f6 100644 --- a/client/ayon_deadline/abstract_submit_deadline.py +++ b/client/ayon_deadline/abstract_submit_deadline.py @@ -180,6 +180,11 @@ def get_generic_job_info(self, instance): job_info.Name = "%s - %s" % (batch_name, instance.name) job_info.BatchName = batch_name job_info.UserName = context.data.get("deadlineUser", getpass.getuser()) # TODO clean deadlineUser + job_info.Comment = context.data.get("comment") + + # TODO switch to job_info collector + job_info.Pool = instance.data.get("primaryPool") + job_info.SecondaryPool = instance.data.get("secondaryPool") exp = instance.data.get("expectedFiles") for filepath in iter_expected_files(exp): From 150bcd7945658773109d6505750e70d5054fb70f Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 1 Nov 2024 17:46:20 +0100 Subject: [PATCH 058/153] Refactored JobInfo for Unreal --- .../publish/unreal/submit_unreal_deadline.py | 53 ++++--------------- 1 file changed, 10 insertions(+), 43 deletions(-) diff --git a/client/ayon_deadline/plugins/publish/unreal/submit_unreal_deadline.py b/client/ayon_deadline/plugins/publish/unreal/submit_unreal_deadline.py index cbc840575f..79a6eb1dac 100644 --- a/client/ayon_deadline/plugins/publish/unreal/submit_unreal_deadline.py +++ b/client/ayon_deadline/plugins/publish/unreal/submit_unreal_deadline.py @@ -1,6 +1,5 @@ import os import attr -import getpass import pyblish.api from datetime import datetime from pathlib import Path @@ -8,7 +7,6 @@ from ayon_core.lib import is_in_tests from ayon_deadline import abstract_submit_deadline -from ayon_deadline.abstract_submit_deadline import DeadlineJobInfo @attr.s @@ -42,53 +40,22 @@ class UnrealSubmitDeadline( order = pyblish.api.IntegratorOrder + 0.1 hosts = ["unreal"] families = ["render.farm"] # cannot be "render' as that is integrated - use_published = True targets = ["local"] - priority = 50 - chunk_size = 1000000 - group = None - department = None - multiprocess = True + def get_job_info(self, job_info=None): + instance = self._instance - def get_job_info(self): - dln_job_info = DeadlineJobInfo(Plugin="UnrealEngine5") + job_info.BatchName = self._get_batch_name() + job_info.Plugin = "UnrealEngine5" - context = self._instance.context - - batch_name = self._get_batch_name() - dln_job_info.Name = self._instance.data["name"] - dln_job_info.BatchName = batch_name - dln_job_info.Plugin = "UnrealEngine5" - dln_job_info.UserName = context.data.get( - "deadlineUser", getpass.getuser()) - if self._instance.data["frameEnd"] > self._instance.data["frameStart"]: + if instance.data["frameEnd"] > instance.data["frameStart"]: # Deadline requires integers in frame range frame_range = "{}-{}".format( - int(round(self._instance.data["frameStart"])), - int(round(self._instance.data["frameEnd"]))) - dln_job_info.Frames = frame_range - - dln_job_info.Priority = self.priority - dln_job_info.Pool = self._instance.data.get("primaryPool") - dln_job_info.SecondaryPool = self._instance.data.get("secondaryPool") - dln_job_info.Group = self.group - dln_job_info.Department = self.department - dln_job_info.ChunkSize = self.chunk_size - dln_job_info.OutputFilename += \ - os.path.basename(self._instance.data["file_names"][0]) - dln_job_info.OutputDirectory += \ - os.path.dirname(self._instance.data["expectedFiles"][0]) - dln_job_info.JobDelay = "00:00:00" - - dln_job_info.add_instance_job_env_vars(self._instance) - dln_job_info.EnvironmentKeyValue["AYON_UNREAL_VERSION"] = ( - self._instance.data)["app_version"] - - # to 
recognize render jobs - dln_job_info.add_render_job_env_var() - - return dln_job_info + int(round(instance.data["frameStart"])), + int(round(instance.data["frameEnd"]))) + job_info.Frames = frame_range + + return job_info def get_plugin_info(self): deadline_plugin_info = DeadlinePluginInfo() From 31b71b97cfc3da778501ece041367c4022cca71a Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 1 Nov 2024 17:56:18 +0100 Subject: [PATCH 059/153] Refactored JobInfo for Houdini --- .../houdini/submit_houdini_cache_deadline.py | 72 ++-------------- .../houdini/submit_houdini_render_deadline.py | 84 +------------------ 2 files changed, 9 insertions(+), 147 deletions(-) diff --git a/client/ayon_deadline/plugins/publish/houdini/submit_houdini_cache_deadline.py b/client/ayon_deadline/plugins/publish/houdini/submit_houdini_cache_deadline.py index e0b66adee4..cafc027e49 100644 --- a/client/ayon_deadline/plugins/publish/houdini/submit_houdini_cache_deadline.py +++ b/client/ayon_deadline/plugins/publish/houdini/submit_houdini_cache_deadline.py @@ -1,5 +1,4 @@ import os -import getpass from datetime import datetime import attr @@ -13,7 +12,6 @@ AYONPyblishPluginMixin ) from ayon_deadline import abstract_submit_deadline -from ayon_deadline.abstract_submit_deadline import DeadlineJobInfo @attr.s @@ -45,19 +43,7 @@ class HoudiniCacheSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline targets = ["local"] settings_category = "deadline" - priority = 50 - chunk_size = 999999 - group = None - limits = "" - machine_limit = 0 - jobInfo = {} - pluginInfo = {} - - - def get_job_info(self): - job_info = DeadlineJobInfo(Plugin="Houdini") - - job_info.update(self.jobInfo) + def get_job_info(self, job_info=None): instance = self._instance context = instance.context assert all( @@ -69,15 +55,15 @@ def get_job_info(self): scenename = os.path.basename(filepath) job_name = "{scene} - {instance} [PUBLISH]".format( scene=scenename, instance=instance.name) - batch_name = "{code} - {scene}".format(code=project_name, - scene=scenename) + batch_name = "{code} - {scene}".format( + code=project_name, scene=scenename) if is_in_tests(): batch_name += datetime.now().strftime("%d%m%Y%H%M%S") job_info.Name = job_name job_info.BatchName = batch_name - job_info.Plugin = instance.data["plugin"] - job_info.UserName = context.data.get("deadlineUser", getpass.getuser()) + job_info.Plugin = instance.data.get("plugin") or "Houdini" + rop_node = self.get_rop_node(instance) if rop_node.type().name() != "alembic": frames = "{start}-{end}x{step}".format( @@ -88,24 +74,6 @@ def get_job_info(self): job_info.Frames = frames - job_info.Pool = instance.data.get("primaryPool") - job_info.SecondaryPool = instance.data.get("secondaryPool") - - attr_values = self.get_attr_values_from_data(instance.data) - - job_info.ChunkSize = instance.data.get("chunk_size", self.chunk_size) - job_info.Comment = context.data.get("comment") - job_info.Priority = attr_values.get("priority", self.priority) - job_info.Group = attr_values.get("group", self.group) - job_info.LimitGroups = attr_values.get("limits", self.limits) - job_info.MachineLimit = attr_values.get( - "machine_limit", self.machine_limit - ) - - # Set job environment variables - job_info.add_instance_job_env_vars(self._instance) - job_info.add_render_job_env_var() - return job_info def get_plugin_info(self): @@ -145,33 +113,3 @@ def get_rop_node(self, instance): rop_node = hou.node(rop) return rop_node - - @classmethod - def get_attribute_defs(cls): - defs = super(HoudiniCacheSubmitDeadline, 
cls).get_attribute_defs() - defs.extend([ - NumberDef("priority", - minimum=1, - maximum=250, - decimals=0, - default=cls.priority, - label="Priority"), - TextDef("group", - default=cls.group, - label="Group Name"), - TextDef( - "limits", - default=cls.limits, - label="Limit Groups", - placeholder="value1,value2", - tooltip="Enter a comma separated list of limit groups." - ), - NumberDef( - "machine_limit", - default=cls.machine_limit, - label="Machine Limit", - tooltip="maximum number of machines for this job." - ), - ]) - - return defs diff --git a/client/ayon_deadline/plugins/publish/houdini/submit_houdini_render_deadline.py b/client/ayon_deadline/plugins/publish/houdini/submit_houdini_render_deadline.py index ed2731391a..0e8997313f 100644 --- a/client/ayon_deadline/plugins/publish/houdini/submit_houdini_render_deadline.py +++ b/client/ayon_deadline/plugins/publish/houdini/submit_houdini_render_deadline.py @@ -1,6 +1,5 @@ import os import attr -import getpass from datetime import datetime import pyblish.api @@ -12,7 +11,6 @@ NumberDef ) from ayon_deadline import abstract_submit_deadline -from ayon_deadline.abstract_submit_deadline import DeadlineJobInfo @attr.s @@ -100,7 +98,6 @@ class HoudiniSubmitDeadline( "vray_rop"] targets = ["local"] settings_category = "deadline" - use_published = True # presets export_priority = 50 @@ -108,47 +105,10 @@ class HoudiniSubmitDeadline( export_group = "" export_limits = "" export_machine_limit = 0 - priority = 50 - chunk_size = 1 - group = "" - limits = "" - machine_limit = 0 @classmethod def get_attribute_defs(cls): return [ - NumberDef( - "priority", - label="Priority", - default=cls.priority, - decimals=0 - ), - NumberDef( - "chunk", - label="Frames Per Task", - default=cls.chunk_size, - decimals=0, - minimum=1, - maximum=1000 - ), - TextDef( - "group", - default=cls.group, - label="Group Name" - ), - TextDef( - "limits", - default=cls.limits, - label="Limit Groups", - placeholder="value1,value2", - tooltip="Enter a comma separated list of limit groups." - ), - NumberDef( - "machine_limit", - default=cls.machine_limit, - label="Machine Limit", - tooltip="maximum number of machines for this job." 
- ), NumberDef( "export_priority", label="Export Priority", @@ -183,13 +143,11 @@ def get_attribute_defs(cls): ), ] - def get_job_info(self, dependency_job_ids=None): + def get_job_info(self, dependency_job_ids=None, job_info=None): instance = self._instance context = instance.context - attribute_values = self.get_attr_values_from_data(instance.data) - # Whether Deadline render submission is being split in two # (extract + render) split_render_job = instance.data.get("splitRender") @@ -214,17 +172,13 @@ def get_job_info(self, dependency_job_ids=None): plugin = "Houdini" if split_render_job: job_type = "[EXPORT IFD]" - - job_info = DeadlineJobInfo(Plugin=plugin) + job_info.Plugin = plugin filepath = context.data["currentFile"] filename = os.path.basename(filepath) job_info.Name = "{} - {} {}".format(filename, instance.name, job_type) job_info.BatchName = filename - job_info.UserName = context.data.get( - "deadlineUser", getpass.getuser()) - if is_in_tests(): job_info.BatchName += datetime.now().strftime("%d%m%Y%H%M%S") @@ -244,9 +198,7 @@ def get_job_info(self, dependency_job_ids=None): job_info.IsFrameDependent = bool(instance.data.get( "splitRenderFrameDependent", True)) - job_info.Pool = instance.data.get("primaryPool") - job_info.SecondaryPool = instance.data.get("secondaryPool") - + attribute_values = self.get_attr_values_from_data(instance.data) if split_render_job and is_export_job: job_info.Priority = attribute_values.get( "export_priority", self.export_priority @@ -263,36 +215,8 @@ def get_job_info(self, dependency_job_ids=None): job_info.MachineLimit = attribute_values.get( "export_machine_limit", self.export_machine_limit ) - else: - job_info.Priority = attribute_values.get( - "priority", self.priority - ) - job_info.ChunkSize = attribute_values.get( - "chunk", self.chunk_size - ) - job_info.Group = attribute_values.get( - "group", self.group - ) - job_info.LimitGroups = attribute_values.get( - "limits", self.limits - ) - job_info.MachineLimit = attribute_values.get( - "machine_limit", self.machine_limit - ) - - # Apply render globals, like e.g. data from collect machine list - render_globals = instance.data.get("renderGlobals", {}) - if render_globals: - self.log.debug("Applying 'renderGlobals' to job info: %s", - render_globals) - job_info.update(render_globals) - - job_info.Comment = context.data.get("comment") - - # Set job environment variables - job_info.add_instance_job_env_vars(self._instance) - job_info.add_render_job_env_var() + # TODO change to expectedFiles?? for i, filepath in enumerate(instance.data["files"]): dirname = os.path.dirname(filepath) fname = os.path.basename(filepath) From ce2f93a46dff2f1df4cd66dc81fc50621f159129 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 4 Nov 2024 12:40:01 +0100 Subject: [PATCH 060/153] Reordered defaults This way type validations should kick in for default values. 
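A small illustration of the intent, assuming the ayon_core attribute
definitions validate/coerce the ``default`` argument on construction
(which is what this reordering relies on):

    # Sketch only: the default value is now passed to the constructor so
    # NumberDef can validate it, instead of assigning attr_def.default
    # after the definition object was already created.
    from ayon_core.lib import NumberDef

    default_values = {"priority": 50}  # illustrative; normally from the matched profile
    priority_def = NumberDef(
        "priority",
        label="Priority",
        default=default_values["priority"],
        decimals=0,
    )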
--- .../plugins/publish/global/collect_jobinfo.py | 32 ++++++++----------- 1 file changed, 14 insertions(+), 18 deletions(-) diff --git a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py index f79b607e69..7035927e5c 100644 --- a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py +++ b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py @@ -129,11 +129,18 @@ def get_attr_defs_for_instance(cls, create_context, instance): def _get_artist_overrides(cls, overrides, profile): """Provide list of Defs that could be filled by artist""" # should be matching to extract_jobinfo_overrides_enum + default_values = {} + for key in overrides: + default_value = profile[key] + if isinstance(default_value, list): + default_value = ",".join(default_value) + default_values[key] = default_value + override_defs = [ NumberDef( - "chunkSize", + "chunk_size", label="Frames Per Task", - default=1, + default=default_values["chunk_size"], decimals=0, minimum=1, maximum=1000 @@ -141,40 +148,29 @@ def _get_artist_overrides(cls, overrides, profile): NumberDef( "priority", label="Priority", + default=default_values["priority"], decimals=0 ), TextDef( "department", label="Department", - default="", + default=default_values["department"] ), TextDef( "limit_groups", label="Limit Groups", # multiline=True, TODO - some DCC might have issues with storing multi lines - default="", + default=default_values["limit_groups"], placeholder="machine1,machine2" ), TextDef( "job_delay", label="Delay job (timecode dd:hh:mm:ss)", - default="" + default=default_values["job_delay"], ) ] - defs = [] - # The Arguments that can be modified by the Publisher - for attr_def in override_defs: - if attr_def.key not in overrides: - continue - - default_value = profile[attr_def.key] - if (isinstance(attr_def, TextDef) and - isinstance(default_value, list)): - default_value = ",".join(default_value) - attr_def.default = default_value - defs.append(attr_def) - return defs + return override_defs @classmethod def register_create_context_callbacks(cls, create_context): From e39c116755210b2921a5d77fe57816ecaa05ca4f Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 4 Nov 2024 13:05:36 +0100 Subject: [PATCH 061/153] Ruff fixes --- client/ayon_deadline/lib.py | 2 +- .../publish/aftereffects/submit_aftereffects_deadline.py | 1 - .../plugins/publish/global/collect_jobinfo.py | 4 ---- .../publish/houdini/submit_houdini_cache_deadline.py | 2 -- .../plugins/publish/maya/submit_maya_deadline.py | 9 +-------- 5 files changed, 2 insertions(+), 16 deletions(-) diff --git a/client/ayon_deadline/lib.py b/client/ayon_deadline/lib.py index 5c109b6d4a..5fe63797c3 100644 --- a/client/ayon_deadline/lib.py +++ b/client/ayon_deadline/lib.py @@ -365,7 +365,7 @@ def filter_data(a, v): return serialized @classmethod - def from_dict(cls, data: Dict) -> 'JobInfo': + def from_dict(cls, data: Dict) -> 'AYONDeadlineJobInfo': implemented_field_values = { "ChunkSize": data["chunk_size"], diff --git a/client/ayon_deadline/plugins/publish/aftereffects/submit_aftereffects_deadline.py b/client/ayon_deadline/plugins/publish/aftereffects/submit_aftereffects_deadline.py index bc4baffac4..7c99b078b5 100644 --- a/client/ayon_deadline/plugins/publish/aftereffects/submit_aftereffects_deadline.py +++ b/client/ayon_deadline/plugins/publish/aftereffects/submit_aftereffects_deadline.py @@ -5,7 +5,6 @@ from ayon_core.lib import ( env_value_to_bool, collect_frames, - is_in_tests, ) from ayon_deadline 
import abstract_submit_deadline diff --git a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py index 7035927e5c..bef42a24f7 100644 --- a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py +++ b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py @@ -6,8 +6,6 @@ BoolDef, NumberDef, TextDef, - EnumDef, - is_in_tests, UISeparatorDef ) from ayon_core.pipeline.publish import AYONPyblishPluginMixin @@ -200,8 +198,6 @@ def _get_jobinfo_defaults(self, instance): Returns: (dict) """ - attr_values = {} - context_data = instance.context.data host_name = context_data["hostName"] project_settings = context_data["project_settings"] diff --git a/client/ayon_deadline/plugins/publish/houdini/submit_houdini_cache_deadline.py b/client/ayon_deadline/plugins/publish/houdini/submit_houdini_cache_deadline.py index cafc027e49..1b5281d16b 100644 --- a/client/ayon_deadline/plugins/publish/houdini/submit_houdini_cache_deadline.py +++ b/client/ayon_deadline/plugins/publish/houdini/submit_houdini_cache_deadline.py @@ -4,8 +4,6 @@ import attr import pyblish.api from ayon_core.lib import ( - TextDef, - NumberDef, is_in_tests, ) from ayon_core.pipeline import ( diff --git a/client/ayon_deadline/plugins/publish/maya/submit_maya_deadline.py b/client/ayon_deadline/plugins/publish/maya/submit_maya_deadline.py index be995e715c..582572ba4a 100644 --- a/client/ayon_deadline/plugins/publish/maya/submit_maya_deadline.py +++ b/client/ayon_deadline/plugins/publish/maya/submit_maya_deadline.py @@ -30,20 +30,13 @@ from ayon_core.pipeline import ( AYONPyblishPluginMixin ) -from ayon_core.lib import ( - BoolDef, - NumberDef, - TextDef, - EnumDef, - is_in_tests, -) + from ayon_maya.api.lib_rendersettings import RenderSettings from ayon_maya.api.lib import get_attr_in_layer from ayon_core.pipeline.farm.tools import iter_expected_files from ayon_deadline import abstract_submit_deadline -from ayon_deadline.lib import FARM_FAMILIES def _validate_deadline_bool_value(instance, attribute, value): From 96afdfd1d3079411dd9737c152e7ca6ca3f97382 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 4 Nov 2024 13:13:46 +0100 Subject: [PATCH 062/153] Fix missing import --- .../plugins/publish/maya/submit_maya_deadline.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/client/ayon_deadline/plugins/publish/maya/submit_maya_deadline.py b/client/ayon_deadline/plugins/publish/maya/submit_maya_deadline.py index 582572ba4a..c2c06d4491 100644 --- a/client/ayon_deadline/plugins/publish/maya/submit_maya_deadline.py +++ b/client/ayon_deadline/plugins/publish/maya/submit_maya_deadline.py @@ -31,11 +31,12 @@ AYONPyblishPluginMixin ) +from ayon_core.lib import is_in_tests +from ayon_core.pipeline.farm.tools import iter_expected_files + from ayon_maya.api.lib_rendersettings import RenderSettings from ayon_maya.api.lib import get_attr_in_layer -from ayon_core.pipeline.farm.tools import iter_expected_files - from ayon_deadline import abstract_submit_deadline From 7c89b2cd9a2c8f0708a7b331f20dc3f16ae46a80 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 4 Nov 2024 16:44:23 +0100 Subject: [PATCH 063/153] Fix empty additional info --- .../plugins/publish/global/collect_jobinfo.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py index bef42a24f7..a298585b1d 100644 --- 
a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py +++ b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py @@ -48,12 +48,17 @@ def process(self, instance): instance.data["deadline"]["job_info"] = job_info # pass through explicitly key and values for PluginInfo - plugin_info_data = json.loads(attr_values["additional_plugin_info"]) + plugin_info_data = None + if attr_values["additional_plugin_info"]: + plugin_info_data = ( + json.loads(attr_values["additional_plugin_info"])) instance.data["deadline"]["plugin_info_data"] = plugin_info_data def _handle_additional_jobinfo(self,attr_values, job_info): """Adds not explicitly implemented fields by values from Settings.""" additional_job_info = attr_values["additional_job_info"] + if not additional_job_info: + return for key, value in json.loads(additional_job_info).items(): setattr(job_info, key, value) From e0f1a296fd430e1e5eff40fa8abd24317ec726a0 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 4 Nov 2024 16:50:30 +0100 Subject: [PATCH 064/153] Refactor Fusion submitter to be based on the AbstractSubmitDeadline plug-in --- .../publish/fusion/submit_fusion_deadline.py | 251 ++++++------------ 1 file changed, 76 insertions(+), 175 deletions(-) diff --git a/client/ayon_deadline/plugins/publish/fusion/submit_fusion_deadline.py b/client/ayon_deadline/plugins/publish/fusion/submit_fusion_deadline.py index 6c1790ebd4..8b444d98d0 100644 --- a/client/ayon_deadline/plugins/publish/fusion/submit_fusion_deadline.py +++ b/client/ayon_deadline/plugins/publish/fusion/submit_fusion_deadline.py @@ -1,28 +1,36 @@ import os -import json -import getpass import pyblish.api -from ayon_deadline.abstract_submit_deadline import requests_post -from ayon_deadline.lib import get_ayon_render_job_envs, get_instance_job_envs -from ayon_core.pipeline.publish import ( - AYONPyblishPluginMixin -) -from ayon_core.lib import NumberDef +import attr +from ayon_core.pipeline.publish import AYONPyblishPluginMixin +from ayon_core.pipeline.farm.tools import iter_expected_files +from ayon_deadline import abstract_submit_deadline -class FusionSubmitDeadline( - pyblish.api.InstancePlugin, - AYONPyblishPluginMixin -): +@attr.s +class FusionPluginInfo: + FlowFile: str = attr.ib(default=None) # Input + Version: str = attr.ib(default=None) # Mandatory for Deadline + + # Render in high quality + HighQuality: bool = attr.ib(default=True) + # Whether saver output should be checked after rendering + # is complete + CheckOutput: bool = attr.ib(default=True) + # Proxy: higher numbers smaller images for faster test renders + # 1 = no proxy quality + Proxy: int = attr.ib(default=1) + + +class FusionSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline, + AYONPyblishPluginMixin): """Submit current Comp to Deadline Renders are submitted to a Deadline Web Service as supplied via settings key "DEADLINE_REST_URL". 
""" - label = "Submit Fusion to Deadline" order = pyblish.api.IntegratorOrder hosts = ["fusion"] @@ -38,56 +46,32 @@ class FusionSubmitDeadline( concurrent_tasks = 1 group = "" - @classmethod - def get_attribute_defs(cls): - return [ - NumberDef( - "priority", - label="Priority", - default=cls.priority, - decimals=0 - ), - NumberDef( - "chunk", - label="Frames Per Task", - default=cls.chunk_size, - decimals=0, - minimum=1, - maximum=1000 - ), - NumberDef( - "concurrency", - label="Concurrency", - default=cls.concurrent_tasks, - decimals=0, - minimum=1, - maximum=10 - ) - ] - def process(self, instance): - if not instance.data.get("farm"): - self.log.debug("Skipping local instance.") + if not instance.data["farm"]: + self.log.debug("Render on farm is disabled. " + "Skipping deadline submission.") return - attribute_values = self.get_attr_values_from_data( - instance.data) - + # TODO: Avoid this hack and instead use a proper way to submit + # each render per instance individually + # TODO: Also, we should support submitting a job per group of instances + # that are set to a different frame range. Currently we're always + # expecting to render the full frame range for each. Which may mean + # we need multiple render jobs but a single publish job dependent on + # the multiple separate instance jobs? + # We are submitting a farm job not per instance - but once per Fusion + # comp. This is a hack to avoid submitting multiple jobs for each + # saver separately which would be much slower. context = instance.context - key = "__hasRun{}".format(self.__class__.__name__) if context.data.get(key, False): return else: context.data[key] = True - from ayon_fusion.api.lib import get_frame_path - - deadline_url = instance.data["deadline"]["url"] - assert deadline_url, "Requires Deadline Webservice URL" - # Collect all saver instances in context that are to be rendered saver_instances = [] + context = instance.context for inst in context: if inst.data["productType"] != "render": # Allow only saver family instances @@ -103,130 +87,47 @@ def process(self, instance): if not saver_instances: raise RuntimeError("No instances found for Deadline submission") - comment = instance.data.get("comment", "") - deadline_user = context.data.get("deadlineUser", getpass.getuser()) + instance.data["_farmSaverInstances"] = saver_instances - script_path = context.data["currentFile"] + super().process(instance) - anatomy = instance.context.data["anatomy"] - publish_template = anatomy.get_template_item( - "publish", "default", "path" - ) - for item in context: - if "workfile" in item.data["families"]: - msg = "Workfile (scene) must be published along" - assert item.data["publish"] is True, msg - - template_data = item.data.get("anatomyData") - rep = item.data.get("representations")[0].get("name") - template_data["representation"] = rep - template_data["ext"] = rep - template_data["comment"] = None - template_filled = publish_template.format_strict( - template_data - ) - script_path = os.path.normpath(template_filled) - - self.log.info( - "Using published scene for render {}".format(script_path) - ) - - filename = os.path.basename(script_path) - - # Documentation for keys available at: - # https://docs.thinkboxsoftware.com - # /products/deadline/8.0/1_User%20Manual/manual - # /manual-submission.html#job-info-file-options - payload = { - "JobInfo": { - # Top-level group name - "BatchName": filename, - - # Asset dependency to wait for at least the scene file to sync. 
- "AssetDependency0": script_path, - - # Job name, as seen in Monitor - "Name": filename, - - "Priority": attribute_values.get( - "priority", self.priority), - "ChunkSize": attribute_values.get( - "chunk", self.chunk_size), - "ConcurrentTasks": attribute_values.get( - "concurrency", - self.concurrent_tasks - ), - - # User, as seen in Monitor - "UserName": deadline_user, - - "Pool": instance.data.get("primaryPool"), - "SecondaryPool": instance.data.get("secondaryPool"), - "Group": self.group, - - "Plugin": self.plugin, - "Frames": "{start}-{end}".format( - start=int(instance.data["frameStartHandle"]), - end=int(instance.data["frameEndHandle"]) - ), - - "Comment": comment, - }, - "PluginInfo": { - # Input - "FlowFile": script_path, - - # Mandatory for Deadline - "Version": str(instance.data["app_version"]), - - # Render in high quality - "HighQuality": True, - - # Whether saver output should be checked after rendering - # is complete - "CheckOutput": True, - - # Proxy: higher numbers smaller images for faster test renders - # 1 = no proxy quality - "Proxy": 1 - }, - - # Mandatory for Deadline, may be empty - "AuxFiles": [] - } - - # Enable going to rendered frames from Deadline Monitor - for index, instance in enumerate(saver_instances): - head, padding, tail = get_frame_path( - instance.data["expectedFiles"][0] + # Store the response for dependent job submission plug-ins for all + # the instances + for saver_instance in saver_instances: + saver_instance.data["deadlineSubmissionJob"] = ( + instance.data["deadlineSubmissionJob"] ) - path = "{}{}{}".format(head, "#" * padding, tail) - folder, filename = os.path.split(path) - payload["JobInfo"]["OutputDirectory%d" % index] = folder - payload["JobInfo"]["OutputFilename%d" % index] = filename - - # Set job environment variables - environment = get_instance_job_envs(instance) - environment.update(get_ayon_render_job_envs()) - - payload["JobInfo"].update({ - "EnvironmentKeyValue%d" % index: "{key}={value}".format( - key=key, - value=environment[key] - ) for index, key in enumerate(environment) - }) - - self.log.debug("Submitting..") - self.log.debug(json.dumps(payload, indent=4, sort_keys=True)) - - # E.g. 
http://192.168.0.1:8082/api/jobs - url = "{}/api/jobs".format(deadline_url) - auth = instance.data["deadline"]["auth"] - verify = instance.data["deadline"]["verify"] - response = requests_post(url, json=payload, auth=auth, verify=verify) - if not response.ok: - raise Exception(response.text) - - # Store the response for dependent job submission plug-ins - for instance in saver_instances: - instance.data["deadlineSubmissionJob"] = response.json() + + def get_job_info(self, job_info=None, **kwargs): + instance = self._instance + + # Deadline requires integers in frame range + job_info.Plugin = self.plugin or "Fusion" + job_info.Frames = "{start}-{end}".format( + start=int(instance.data["frameStartHandle"]), + end=int(instance.data["frameEndHandle"]) + ) + + # We override the default behavior of AbstractSubmitDeadline here to + # include the output directory and output filename for each individual + # saver instance, instead of only the current instance, because we're + # submitting one job for multiple savers + for saver_instance in instance.data["_farmSaverInstances"]: + if saver_instance is instance: + continue + + exp = instance.data.get("expectedFiles") + for filepath in iter_expected_files(exp): + job_info.OutputDirectory += os.path.dirname(filepath) + job_info.OutputFilename += os.path.basename(filepath) + + return job_info + + def get_plugin_info(self): + instance = self._instance + plugin_info = FusionPluginInfo( + FlowFile=self.scene_path, + Version=str(instance.data["app_version"]), + ) + plugin_payload: dict = attr.asdict(plugin_info) + return plugin_payload \ No newline at end of file From af1e4f24ee5b2b801c6b94332a5b67f535dc260c Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 4 Nov 2024 16:51:05 +0100 Subject: [PATCH 065/153] Improve `Delay Job` attribute definition (less long label, add placeholder plus describe more in tooltip) --- .../ayon_deadline/plugins/publish/global/collect_jobinfo.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py index bef42a24f7..4b4b9ae58c 100644 --- a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py +++ b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py @@ -163,8 +163,10 @@ def _get_artist_overrides(cls, overrides, profile): ), TextDef( "job_delay", - label="Delay job (timecode dd:hh:mm:ss)", + label="Delay job", default=default_values["job_delay"], + tooltip="Delay job by specified timecode. 
Format: dd:hh:mm:ss", + placeholder="00:00:00:00" ) ] From f6c39742a485de545615738f286f54bb47259560 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Mon, 4 Nov 2024 16:51:35 +0100 Subject: [PATCH 066/153] Add some type hints to the code --- client/ayon_deadline/abstract_submit_deadline.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/client/ayon_deadline/abstract_submit_deadline.py b/client/ayon_deadline/abstract_submit_deadline.py index 74915bd0f6..aff12faa1c 100644 --- a/client/ayon_deadline/abstract_submit_deadline.py +++ b/client/ayon_deadline/abstract_submit_deadline.py @@ -24,6 +24,7 @@ ) from ayon_core.pipeline.farm.tools import iter_expected_files from ayon_core.lib import is_in_tests +from ayon_deadline.lib import AYONDeadlineJobInfo JSONDecodeError = getattr(json.decoder, "JSONDecodeError", ValueError) @@ -163,10 +164,9 @@ def process_submission(self): verify = self._instance.data["deadline"]["verify"] return self.submit(payload, auth, verify) - def get_generic_job_info(self, instance): - context = instance.context - - job_info = instance.data["deadline"]["job_info"] + def get_generic_job_info(self, instance: pyblish.api.Instance): + context: pyblish.api.Context = instance.context + job_info: AYONDeadlineJobInfo = instance.data["deadline"]["job_info"] # Always use the original work file name for the Job name even when # rendering is done from the published Work File. The original work From d8c44d9a822e3b2917e6b3394c04e5f962df41f4 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 4 Nov 2024 16:58:21 +0100 Subject: [PATCH 067/153] Moved pools from separate plugin to collect_jobinfo --- .../ayon_deadline/abstract_submit_deadline.py | 7 +- client/ayon_deadline/lib.py | 2 + .../plugins/publish/global/collect_jobinfo.py | 61 ++++++++++-- .../plugins/publish/global/collect_pools.py | 92 ------------------- .../publish/global/validate_deadline_pools.py | 15 ++- server/settings/publish_plugins.py | 16 +--- 6 files changed, 69 insertions(+), 124 deletions(-) delete mode 100644 client/ayon_deadline/plugins/publish/global/collect_pools.py diff --git a/client/ayon_deadline/abstract_submit_deadline.py b/client/ayon_deadline/abstract_submit_deadline.py index 74915bd0f6..39a7d3edda 100644 --- a/client/ayon_deadline/abstract_submit_deadline.py +++ b/client/ayon_deadline/abstract_submit_deadline.py @@ -182,9 +182,10 @@ def get_generic_job_info(self, instance): job_info.UserName = context.data.get("deadlineUser", getpass.getuser()) # TODO clean deadlineUser job_info.Comment = context.data.get("comment") - # TODO switch to job_info collector - job_info.Pool = instance.data.get("primaryPool") - job_info.SecondaryPool = instance.data.get("secondaryPool") + if job_info.Pool != "none": + job_info.Pool = job_info.Pool + if job_info.SecondaryPool != "none": + job_info.SecondaryPool = job_info.SecondaryPool exp = instance.data.get("expectedFiles") for filepath in iter_expected_files(exp): diff --git a/client/ayon_deadline/lib.py b/client/ayon_deadline/lib.py index 5fe63797c3..8231f85b72 100644 --- a/client/ayon_deadline/lib.py +++ b/client/ayon_deadline/lib.py @@ -373,6 +373,8 @@ def from_dict(cls, data: Dict) -> 'AYONDeadlineJobInfo': "MachineLimit": data["machine_limit"], "ConcurrentTasks": data["concurrent_tasks"], "Frames": data["frames"], + "Pool": data["primary_pool"], + "SecondaryPool": data["secondary_pool"], # fields needed for logic, values unavailable during collection "UsePublished": data["use_published"], diff --git 
a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py index a298585b1d..6de4e6c3fb 100644 --- a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py +++ b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py @@ -5,11 +5,13 @@ from ayon_core.lib import ( BoolDef, NumberDef, + EnumDef, TextDef, UISeparatorDef ) from ayon_core.pipeline.publish import AYONPyblishPluginMixin from ayon_core.lib.profiles_filtering import filter_profiles +from ayon_core.addon import AddonsManager from ayon_deadline.lib import ( FARM_FAMILIES, @@ -34,6 +36,7 @@ class CollectJobInfo(pyblish.api.InstancePlugin, AYONPyblishPluginMixin): targets = ["local"] profiles = [] + pool_enum_values = [] def process(self, instance): attr_values = self._get_jobinfo_defaults(instance) @@ -77,6 +80,21 @@ def apply_settings(cls, project_settings): cls.profiles = profiles or [] + addons_manager = AddonsManager() + deadline_addon = addons_manager["deadline"] + deadline_server_name = settings["deadline_server"] + dl_server_info = deadline_addon.deadline_servers_info.get( + deadline_server_name) + + auth = (dl_server_info["default_username"], + dl_server_info["default_password"]) + pools = deadline_addon.get_deadline_pools( + dl_server_info["value"], + auth + ) + for pool in pools: + cls.pool_enum_values.append({"value": pool, "label": pool}) + @classmethod def get_attr_defs_for_instance(cls, create_context, instance): host_name = create_context.host_name @@ -139,11 +157,11 @@ def _get_artist_overrides(cls, overrides, profile): default_value = ",".join(default_value) default_values[key] = default_value - override_defs = [ + attr_defs = [ NumberDef( "chunk_size", label="Frames Per Task", - default=default_values["chunk_size"], + default=default_values.get("chunk_size"), decimals=0, minimum=1, maximum=1000 @@ -151,28 +169,56 @@ def _get_artist_overrides(cls, overrides, profile): NumberDef( "priority", label="Priority", - default=default_values["priority"], + default=default_values.get("priority"), decimals=0 ), TextDef( "department", label="Department", - default=default_values["department"] + default=default_values.get("department") ), TextDef( "limit_groups", label="Limit Groups", # multiline=True, TODO - some DCC might have issues with storing multi lines - default=default_values["limit_groups"], + default=default_values.get("limit_groups"), placeholder="machine1,machine2" ), + EnumDef( + "primary_pool", + label="Primary pool", + default="none", + items=cls.pool_enum_values, + ), + EnumDef( + "secondary_pool", + label="Secondary pool", + default="none", + items=cls.pool_enum_values, + ), + TextDef( + "machine_list", + label="Machine list", + default=default_values.get("machine_list") + ), + BoolDef( + "machine_list_deny", + label="Machine List is a Deny", + default=default_values.get("machine_list_deny") + ), TextDef( "job_delay", label="Delay job (timecode dd:hh:mm:ss)", - default=default_values["job_delay"], + default=default_values.get("job_delay"), ) ] + override_defs = [] + for attr_def in attr_defs: + if attr_def.key not in overrides: + continue + override_defs.append(attr_def) + return override_defs @classmethod @@ -185,9 +231,6 @@ def on_values_changed(cls, event): instance = instance_change["instance"] if not cls.instance_matches_plugin_families(instance): continue - value_changes = instance_change["changes"] - if "enabled" not in value_changes: - continue new_attrs = cls.get_attr_defs_for_instance( event["create_context"], instance 
diff --git a/client/ayon_deadline/plugins/publish/global/collect_pools.py b/client/ayon_deadline/plugins/publish/global/collect_pools.py deleted file mode 100644 index 6de68502f1..0000000000 --- a/client/ayon_deadline/plugins/publish/global/collect_pools.py +++ /dev/null @@ -1,92 +0,0 @@ -# -*- coding: utf-8 -*- -import pyblish.api -from ayon_core.lib import TextDef -from ayon_core.pipeline.publish import AYONPyblishPluginMixin - -from ayon_deadline.lib import FARM_FAMILIES - - -class CollectDeadlinePools(pyblish.api.InstancePlugin, - AYONPyblishPluginMixin): - """Collect pools from instance or Publisher attributes, from Setting - otherwise. - - Pools are used to control which DL workers could render the job. - - Pools might be set: - - directly on the instance (set directly in DCC) - - from Publisher attributes - - from defaults from Settings. - - Publisher attributes could be shown even for instances that should be - rendered locally as visibility is driven by product type of the instance - (which will be `render` most likely). - (Might be resolved in the future and class attribute 'families' should - be cleaned up.) - - """ - - order = pyblish.api.CollectorOrder + 0.420 - label = "Collect Deadline Pools" - hosts = [ - "aftereffects", - "fusion", - "harmony", - "maya", - "max", - "houdini", - "nuke", - "unreal" - ] - - families = FARM_FAMILIES - - primary_pool = None - secondary_pool = None - - @classmethod - def apply_settings(cls, project_settings): - # deadline.publish.CollectDeadlinePools - settings = project_settings["deadline"]["publish"]["CollectDeadlinePools"] # noqa - cls.primary_pool = settings.get("primary_pool", None) - cls.secondary_pool = settings.get("secondary_pool", None) - - def process(self, instance): - attr_values = self.get_attr_values_from_data(instance.data) - if not instance.data.get("primaryPool"): - instance.data["primaryPool"] = ( - attr_values.get("primaryPool") or self.primary_pool or "none" - ) - if instance.data["primaryPool"] == "-": - instance.data["primaryPool"] = None - - if not instance.data.get("secondaryPool"): - instance.data["secondaryPool"] = ( - attr_values.get("secondaryPool") or self.secondary_pool or "none" # noqa - ) - - if instance.data["secondaryPool"] == "-": - instance.data["secondaryPool"] = None - - @classmethod - def get_attribute_defs(cls): - # TODO: Preferably this would be an enum for the user - # but the Deadline server URL can be dynamic and - # can be set per render instance. Since get_attribute_defs - # can't be dynamic unfortunately EnumDef isn't possible (yet?) 
- # pool_names = self.deadline_addon.get_deadline_pools(deadline_url, - # self.log) - # secondary_pool_names = ["-"] + pool_names - - return [ - TextDef("primaryPool", - label="Primary Pool", - default=cls.primary_pool, - tooltip="Deadline primary pool, " - "applicable for farm rendering"), - TextDef("secondaryPool", - label="Secondary Pool", - default=cls.secondary_pool, - tooltip="Deadline secondary pool, " - "applicable for farm rendering") - ] diff --git a/client/ayon_deadline/plugins/publish/global/validate_deadline_pools.py b/client/ayon_deadline/plugins/publish/global/validate_deadline_pools.py index c7445465c4..196fa4b183 100644 --- a/client/ayon_deadline/plugins/publish/global/validate_deadline_pools.py +++ b/client/ayon_deadline/plugins/publish/global/validate_deadline_pools.py @@ -4,6 +4,7 @@ PublishXmlValidationError, OptionalPyblishPluginMixin ) +from ayon_deadline.lib import FARM_FAMILIES class ValidateDeadlinePools(OptionalPyblishPluginMixin, @@ -16,14 +17,9 @@ class ValidateDeadlinePools(OptionalPyblishPluginMixin, label = "Validate Deadline Pools" order = pyblish.api.ValidatorOrder - families = ["rendering", - "render.farm", - "render.frames_farm", - "renderFarm", - "renderlayer", - "maxrender", - "publish.hou"] + families = FARM_FAMILIES optional = True + targets = ["local"] # cache pools_per_url = {} @@ -46,11 +42,12 @@ def process(self, instance): ) invalid_pools = {} - primary_pool = instance.data.get("primaryPool") + job_info = instance.data["deadline"]["job_info"] + primary_pool = job_info.Pool if primary_pool and primary_pool not in pools: invalid_pools["primary"] = primary_pool - secondary_pool = instance.data.get("secondaryPool") + secondary_pool = job_info.SecondaryPool if secondary_pool and secondary_pool not in pools: invalid_pools["secondary"] = secondary_pool diff --git a/server/settings/publish_plugins.py b/server/settings/publish_plugins.py index b939004fd2..4d2578f8de 100644 --- a/server/settings/publish_plugins.py +++ b/server/settings/publish_plugins.py @@ -27,15 +27,8 @@ class CollectAYONServerToFarmJobModel(BaseSettingsModel): enabled: bool = SettingsField(False, title="Enabled") -class CollectDeadlinePoolsModel(BaseSettingsModel): - """Settings Deadline default pools.""" - - primary_pool: str = SettingsField(title="Primary Pool") - - secondary_pool: str = SettingsField(title="Secondary Pool") - - def extract_jobinfo_overrides_enum(): + """Enum of fields that could be overridden by artist in Publisher UI""" return [ {"label": "Department", "value": "department"}, {"label": "Delay job (timecode dd:hh:mm:ss)", "value": "job_delay"}, @@ -43,6 +36,8 @@ def extract_jobinfo_overrides_enum(): {"label": "Group", "value": "group"}, {"label": "Priority", "value": "priority"}, {"label": "Limit groups", "value": "limit_groups"}, + {"label": "Primary pool", "value": "primary_pool"}, + {"label": "Secondary pool", "value": "secondary_pool"}, {"label": "Machine List", "value": "machine_list"}, {"label": "Machine List is a Deny", "value": "machine_list_deny"}, ] @@ -73,6 +68,8 @@ class CollectJobInfoItem(BaseSettingsModel): default_factory=list, title="Limit Groups" ) + primary_pool: str = SettingsField("", title="Primary Pool") + secondary_pool: str = SettingsField("", title="Secondary Pool") machine_limit: int = SettingsField( 0, title="Machine Limit", @@ -387,9 +384,6 @@ def validate_unique_names(cls, value): class PublishPluginsModel(BaseSettingsModel): - CollectDeadlinePools: CollectDeadlinePoolsModel = SettingsField( - default_factory=CollectDeadlinePoolsModel, - 
title="Default Pools") CollectJobInfo: CollectJobInfoModel = SettingsField( default_factory=CollectJobInfoModel, title="Collect JobInfo") From b5f178be20a4982ddad83beb4de7bc019e1debd2 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 4 Nov 2024 17:26:29 +0100 Subject: [PATCH 068/153] Added group to Publisher UI --- .../ayon_deadline/plugins/publish/global/collect_jobinfo.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py index 6de4e6c3fb..b750931c48 100644 --- a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py +++ b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py @@ -177,6 +177,11 @@ def _get_artist_overrides(cls, overrides, profile): label="Department", default=default_values.get("department") ), + TextDef( + "group", + label="Group", + default=default_values.get("group") + ), TextDef( "limit_groups", label="Limit Groups", From 124d7153d9987bf2235fa99413401a085e53244a Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 4 Nov 2024 17:57:54 +0100 Subject: [PATCH 069/153] Fix limit group placeholder --- client/ayon_deadline/plugins/publish/global/collect_jobinfo.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py index 8336bb0950..32c0f2a2b1 100644 --- a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py +++ b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py @@ -187,7 +187,7 @@ def _get_artist_overrides(cls, overrides, profile): label="Limit Groups", # multiline=True, TODO - some DCC might have issues with storing multi lines default=default_values.get("limit_groups"), - placeholder="machine1,machine2" + placeholder="limit1,limit2" ), EnumDef( "primary_pool", From 42658bcaee7c6216c86b32473d23d95e8598999b Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 4 Nov 2024 18:01:36 +0100 Subject: [PATCH 070/153] Fix empty plugin info --- client/ayon_deadline/abstract_submit_deadline.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/client/ayon_deadline/abstract_submit_deadline.py b/client/ayon_deadline/abstract_submit_deadline.py index 22bdb16dbc..8d97e2c646 100644 --- a/client/ayon_deadline/abstract_submit_deadline.py +++ b/client/ayon_deadline/abstract_submit_deadline.py @@ -110,7 +110,8 @@ def process(self, instance): self.aux_files = self.get_aux_files() plugin_info_data = instance.data["deadline"]["plugin_info_data"] - self.apply_additional_plugin_info(plugin_info_data) + if plugin_info_data: + self.apply_additional_plugin_info(plugin_info_data) job_id = self.process_submission() self.log.info("Submitted job to Deadline: {}.".format(job_id)) From d202aaf52b6757afd8d836e5b8eefd5f74111d95 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 4 Nov 2024 18:56:57 +0100 Subject: [PATCH 071/153] Implemented enum fields and querying for them Implemented groups, machines, limit_groups --- client/ayon_deadline/addon.py | 133 +++++++++++++++++- client/ayon_deadline/lib.py | 17 ++- .../plugins/publish/global/collect_jobinfo.py | 64 ++++++--- 3 files changed, 191 insertions(+), 23 deletions(-) diff --git a/client/ayon_deadline/addon.py b/client/ayon_deadline/addon.py index f697c65974..e5ee0d483c 100644 --- a/client/ayon_deadline/addon.py +++ b/client/ayon_deadline/addon.py @@ -38,6 +38,11 @@ def initialize(self, studio_settings): 
self.deadline_servers_info = deadline_servers_info + self._pools_per_server = {} + self._limit_groups_per_server = {} + self._groups_per_server = {} + self._machines_per_server = {} + def get_plugin_paths(self): """Deadline plugin paths.""" # Note: We are not returning `publish` key because we have overridden @@ -67,26 +72,146 @@ def get_deadline_pools(webservice, auth=None, log=None): RuntimeError: If deadline webservice is unreachable. """ + endpoint = "{}/api/pools?NamesOnly=true".format(webservice) + return DeadlineAddon._get_deadline_info( + endpoint, auth, log, item_type="pools") + + @staticmethod + def get_deadline_groups(webservice, auth=None, log=None): + """Get Groups from Deadline. + Args: + webservice (str): Server url. + auth (Optional[Tuple[str, str]]): Tuple containing username, + password + log (Optional[Logger]): Logger to log errors to, if provided. + Returns: + List[str]: Limit Groups. + Throws: + RuntimeError: If deadline webservice is unreachable. + + """ + endpoint = "{}/api/groups".format(webservice) + return DeadlineAddon._get_deadline_info( + endpoint, auth, log, item_type="groups") + + @staticmethod + def get_deadline_limit_groups(webservice, auth=None, log=None): + """Get Limit Groups from Deadline. + Args: + webservice (str): Server url. + auth (Optional[Tuple[str, str]]): Tuple containing username, + password + log (Optional[Logger]): Logger to log errors to, if provided. + Returns: + List[str]: Limit Groups. + Throws: + RuntimeError: If deadline webservice is unreachable. + + """ + endpoint = "{}/api/limitgroups?NamesOnly=true".format(webservice) + return DeadlineAddon._get_deadline_info( + endpoint, auth, log, item_type="limitgroups") + + @staticmethod + def get_deadline_workers(webservice, auth=None, log=None): + """Get Groups from Deadline. + Args: + webservice (str): Server url. + auth (Optional[Tuple[str, str]]): Tuple containing username, + password + log (Optional[Logger]): Logger to log errors to, if provided. + Returns: + List[str]: Limit Groups. + Throws: + RuntimeError: If deadline webservice is unreachable. 
+ + """ + endpoint = "{}/api/slaves?NamesOnly=true".format(webservice) + return DeadlineAddon._get_deadline_info( + endpoint, auth, log, item_type="workers") + + @staticmethod + def _get_deadline_info(endpoint, auth=None, log=None, item_type=None): from .abstract_submit_deadline import requests_get if not log: log = Logger.get_logger(__name__) - argument = "{}/api/pools?NamesOnly=true".format(webservice) try: kwargs = {} if auth: kwargs["auth"] = auth - response = requests_get(argument, **kwargs) + response = requests_get(endpoint, **kwargs) except requests.exceptions.ConnectionError as exc: - msg = 'Cannot connect to DL web service {}'.format(webservice) + msg = 'Cannot connect to DL web service {}'.format(endpoint) log.error(msg) six.reraise( DeadlineWebserviceError, DeadlineWebserviceError('{} - {}'.format(msg, exc)), sys.exc_info()[2]) if not response.ok: - log.warning("No pools retrieved") + log.warning(f"No {item_type} retrieved") return [] return response.json() + + def pools_per_server(self, server_name): + pools = self._pools_per_server.get(server_name) + if pools is None: + dl_server_info = self.deadline_servers_info.get(server_name) + + auth = (dl_server_info["default_username"], + dl_server_info["default_password"]) + pools = self.get_deadline_pools( + dl_server_info["value"], + auth + ) + self._pools_per_server[server_name] = pools + + return pools + + def groups_per_server(self, server_name): + groups = self._groups_per_server.get(server_name) + if groups is None: + dl_server_info = self.deadline_servers_info.get(server_name) + + auth = (dl_server_info["default_username"], + dl_server_info["default_password"]) + groups = self.get_deadline_groups( + dl_server_info["value"], + auth + ) + self._groups_per_server[server_name] = groups + + return groups + + def limit_groups_per_server(self, server_name): + limit_groups = self._limit_groups_per_server.get(server_name) + if limit_groups is None: + dl_server_info = self.deadline_servers_info.get(server_name) + + auth = (dl_server_info["default_username"], + dl_server_info["default_password"]) + limit_groups = self.get_deadline_limit_groups( + dl_server_info["value"], + auth + ) + self._limit_groups_per_server[server_name] = limit_groups + + return limit_groups + + def machines_per_server(self, server_name): + machines = self._machines_per_server.get(server_name) + if machines is None: + dl_server_info = self.deadline_servers_info.get(server_name) + + auth = (dl_server_info["default_username"], + dl_server_info["default_password"]) + machines = self.get_deadline_workers( + dl_server_info["value"], + auth + ) + self._machines_per_server[server_name] = machines + + return machines + diff --git a/client/ayon_deadline/lib.py b/client/ayon_deadline/lib.py index 8231f85b72..aa31467110 100644 --- a/client/ayon_deadline/lib.py +++ b/client/ayon_deadline/lib.py @@ -373,8 +373,9 @@ def from_dict(cls, data: Dict) -> 'AYONDeadlineJobInfo': "MachineLimit": data["machine_limit"], "ConcurrentTasks": data["concurrent_tasks"], "Frames": data["frames"], - "Pool": data["primary_pool"], - "SecondaryPool": data["secondary_pool"], + "Group": cls._sanitize(data["group"]), + "Pool": cls._sanitize(data["primary_pool"]), + "SecondaryPool": cls._sanitize(data["secondary_pool"]), # fields needed for logic, values unavailable during collection "UsePublished": data["use_published"], @@ -400,3 +401,15 @@ def add_instance_job_env_vars(self, instance): def to_json(self) -> str: """Serialize the dataclass instance to a JSON string.""" return json.dumps(asdict(self)) + 
+ @classmethod + def _sanitize(cls, value) -> str: + if isinstance(value, str): + if value == "none": + return None + if isinstance(value, list): + filtered = [] + for val in value: + if val and val != "none": + filtered.append(val) + return filtered diff --git a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py index 32c0f2a2b1..0884d217f0 100644 --- a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py +++ b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py @@ -37,6 +37,9 @@ class CollectJobInfo(pyblish.api.InstancePlugin, AYONPyblishPluginMixin): profiles = [] pool_enum_values = [] + group_enum_values = [] + limit_group_enum_values = [] + machines_enum_values = [] def process(self, instance): attr_values = self._get_jobinfo_defaults(instance) @@ -68,7 +71,7 @@ def _handle_additional_jobinfo(self,attr_values, job_info): def _handle_machine_list(self, attr_values, job_info): machine_list = attr_values["machine_list"] if machine_list: - if job_info.MachineListDeny: + if attr_values["machine_list_deny"]: job_info.Blacklist = machine_list else: job_info.Whitelist = machine_list @@ -83,18 +86,28 @@ def apply_settings(cls, project_settings): addons_manager = AddonsManager() deadline_addon = addons_manager["deadline"] deadline_server_name = settings["deadline_server"] - dl_server_info = deadline_addon.deadline_servers_info.get( - deadline_server_name) - - auth = (dl_server_info["default_username"], - dl_server_info["default_password"]) - pools = deadline_addon.get_deadline_pools( - dl_server_info["value"], - auth - ) + pools = deadline_addon.pools_per_server(deadline_server_name) for pool in pools: cls.pool_enum_values.append({"value": pool, "label": pool}) + groups = deadline_addon.groups_per_server(deadline_server_name) + for group in groups: + cls.group_enum_values.append({"value": group, "label": group}) + + limit_groups = ( + deadline_addon.limit_groups_per_server(deadline_server_name)) + if not limit_groups: + limit_groups.append("none") # enum cannot be empty + for limit_group in limit_groups: + cls.limit_group_enum_values.append( + {"value": limit_group, "label": limit_group}) + + machines = ( + deadline_addon.machines_per_server(deadline_server_name)) + for machine in machines: + cls.machines_enum_values.append( + {"value": machine, "label": machine}) + @classmethod def get_attr_defs_for_instance(cls, create_context, instance): host_name = create_context.host_name @@ -153,8 +166,22 @@ def _get_artist_overrides(cls, overrides, profile): default_values = {} for key in overrides: default_value = profile[key] + if key == "machine_limit": + filtered = [] + for value in default_value: + if value in cls.machines_enum_values: + filtered.append(value) + default_value = filtered + if key == "limit_groups": + filtered = [] + for value in default_value: + if value in cls.limit_group_enum_values: + filtered.append(value) + default_value = filtered if isinstance(default_value, list): default_value = ",".join(default_value) + if key == "group" and default_value not in cls.group_enum_values: + default_value = "" default_values[key] = default_value attr_defs = [ @@ -177,17 +204,18 @@ def _get_artist_overrides(cls, overrides, profile): label="Department", default=default_values.get("department") ), - TextDef( + EnumDef( "group", label="Group", - default=default_values.get("group") + default=default_values.get("group"), + items=cls.group_enum_values, ), - TextDef( + EnumDef( "limit_groups", label="Limit 
Groups", - # multiline=True, TODO - some DCC might have issues with storing multi lines + multiselection=True, default=default_values.get("limit_groups"), - placeholder="limit1,limit2" + items=cls.limit_group_enum_values, ), EnumDef( "primary_pool", @@ -201,10 +229,12 @@ def _get_artist_overrides(cls, overrides, profile): default="none", items=cls.pool_enum_values, ), - TextDef( + EnumDef( "machine_list", label="Machine list", - default=default_values.get("machine_list") + multiselection=True, + default=default_values.get("machine_list"), + items=cls.machines_enum_values, ), BoolDef( "machine_list_deny", From 13c80eca1e28a0a87949d5b4fc5c2620d31bbffe Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 4 Nov 2024 19:02:26 +0100 Subject: [PATCH 072/153] Fix removing job_info --- .../plugins/publish/global/submit_publish_cache_job.py | 1 + .../ayon_deadline/plugins/publish/global/submit_publish_job.py | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/client/ayon_deadline/plugins/publish/global/submit_publish_cache_job.py b/client/ayon_deadline/plugins/publish/global/submit_publish_cache_job.py index 8e6f3bbe21..176514a618 100644 --- a/client/ayon_deadline/plugins/publish/global/submit_publish_cache_job.py +++ b/client/ayon_deadline/plugins/publish/global/submit_publish_cache_job.py @@ -348,6 +348,7 @@ def process(self, instance): if "deadline" not in inst: inst["deadline"] = {} inst["deadline"] = instance.data["deadline"] + inst.data["deadline"].pop("job_info") # publish job file publish_job = { diff --git a/client/ayon_deadline/plugins/publish/global/submit_publish_job.py b/client/ayon_deadline/plugins/publish/global/submit_publish_job.py index 04bfa067a2..50fa317aac 100644 --- a/client/ayon_deadline/plugins/publish/global/submit_publish_job.py +++ b/client/ayon_deadline/plugins/publish/global/submit_publish_job.py @@ -466,7 +466,7 @@ def process(self, instance): # Inject deadline url to instances to query DL for job id for overrides for inst in instances: - instance.data["deadline"].pop("job_info") + inst.data["deadline"].pop("job_info") inst["deadline"] = instance.data["deadline"] # publish job file From 2cb890aa18b2913efcc91222d620d5e2530dde12 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 5 Nov 2024 12:28:56 +0100 Subject: [PATCH 073/153] Formatting added last empty line Co-authored-by: Roy Nieterau --- .../plugins/publish/fusion/submit_fusion_deadline.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/client/ayon_deadline/plugins/publish/fusion/submit_fusion_deadline.py b/client/ayon_deadline/plugins/publish/fusion/submit_fusion_deadline.py index 8b444d98d0..ef37539055 100644 --- a/client/ayon_deadline/plugins/publish/fusion/submit_fusion_deadline.py +++ b/client/ayon_deadline/plugins/publish/fusion/submit_fusion_deadline.py @@ -130,4 +130,4 @@ def get_plugin_info(self): Version=str(instance.data["app_version"]), ) plugin_payload: dict = attr.asdict(plugin_info) - return plugin_payload \ No newline at end of file + return plugin_payload From f3db2b5107cea660b6f449e5332257a8ba2def6c Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 5 Nov 2024 12:30:27 +0100 Subject: [PATCH 074/153] Fix removal of job_info Co-authored-by: Mustafa Jafar --- .../plugins/publish/global/submit_publish_cache_job.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/client/ayon_deadline/plugins/publish/global/submit_publish_cache_job.py b/client/ayon_deadline/plugins/publish/global/submit_publish_cache_job.py index 176514a618..fd1e44d295 100644 --- 
a/client/ayon_deadline/plugins/publish/global/submit_publish_cache_job.py +++ b/client/ayon_deadline/plugins/publish/global/submit_publish_cache_job.py @@ -348,7 +348,7 @@ def process(self, instance): if "deadline" not in inst: inst["deadline"] = {} inst["deadline"] = instance.data["deadline"] - inst.data["deadline"].pop("job_info") + inst["deadline"].pop("job_info") # publish job file publish_job = { From 7cf4c72a0bb9c57c0ffbe88220c7a5334c7da1fb Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 5 Nov 2024 12:33:06 +0100 Subject: [PATCH 075/153] Fix use keyword argument Applicable for Houdini Co-authored-by: Mustafa Jafar --- client/ayon_deadline/abstract_submit_deadline.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/client/ayon_deadline/abstract_submit_deadline.py b/client/ayon_deadline/abstract_submit_deadline.py index 8d97e2c646..0c315ecf95 100644 --- a/client/ayon_deadline/abstract_submit_deadline.py +++ b/client/ayon_deadline/abstract_submit_deadline.py @@ -101,7 +101,7 @@ def process(self, instance): assert self._deadline_url, "Requires Deadline Webservice URL" job_info = self.get_generic_job_info(instance) - self.job_info = self.get_job_info(job_info) + self.job_info = self.get_job_info(job_info=job_info) self._set_scene_path( context.data["currentFile"], job_info.UsePublished) From 193ea1c496d743f8e989d78788f8d634c6a5d26b Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 5 Nov 2024 12:43:30 +0100 Subject: [PATCH 076/153] Fix usage of keyword argument --- .../aftereffects/submit_aftereffects_deadline.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/client/ayon_deadline/plugins/publish/aftereffects/submit_aftereffects_deadline.py b/client/ayon_deadline/plugins/publish/aftereffects/submit_aftereffects_deadline.py index 7c99b078b5..b3e638dd8c 100644 --- a/client/ayon_deadline/plugins/publish/aftereffects/submit_aftereffects_deadline.py +++ b/client/ayon_deadline/plugins/publish/aftereffects/submit_aftereffects_deadline.py @@ -35,18 +35,18 @@ class AfterEffectsSubmitDeadline( targets = ["local"] settings_category = "deadline" - def get_job_info(self, dln_job_info=None): - dln_job_info.Plugin = "AfterEffects" + def get_job_info(self, job_info=None): + job_info.Plugin = "AfterEffects" # already collected explicit values for rendered Frames - if not dln_job_info.Frames: + if not job_info.Frames: # Deadline requires integers in frame range frame_range = "{}-{}".format( int(round(self._instance.data["frameStart"])), int(round(self._instance.data["frameEnd"]))) - dln_job_info.Frames = frame_range + job_info.Frames = frame_range - return dln_job_info + return job_info def get_plugin_info(self): deadline_plugin_info = DeadlinePluginInfo() From 8526e5ae2ead7e8bdfe8785237755498bcbe492e Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 5 Nov 2024 12:43:45 +0100 Subject: [PATCH 077/153] Fix sending of machine_list --- .../ayon_deadline/plugins/publish/global/collect_jobinfo.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py index 0884d217f0..47969e94b7 100644 --- a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py +++ b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py @@ -72,9 +72,9 @@ def _handle_machine_list(self, attr_values, job_info): machine_list = attr_values["machine_list"] if machine_list: if attr_values["machine_list_deny"]: - job_info.Blacklist = 
machine_list + job_info.Blacklist = ",".join(machine_list) else: - job_info.Whitelist = machine_list + job_info.Whitelist = ",".join(machine_list) @classmethod def apply_settings(cls, project_settings): From 3b034008f7825da03675f445fe023b7f20d8c90e Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 5 Nov 2024 12:44:53 +0100 Subject: [PATCH 078/153] Fix removal job_info --- .../ayon_deadline/plugins/publish/global/submit_publish_job.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/client/ayon_deadline/plugins/publish/global/submit_publish_job.py b/client/ayon_deadline/plugins/publish/global/submit_publish_job.py index 50fa317aac..4eb96ed4b7 100644 --- a/client/ayon_deadline/plugins/publish/global/submit_publish_job.py +++ b/client/ayon_deadline/plugins/publish/global/submit_publish_job.py @@ -466,8 +466,8 @@ def process(self, instance): # Inject deadline url to instances to query DL for job id for overrides for inst in instances: - inst.data["deadline"].pop("job_info") inst["deadline"] = instance.data["deadline"] + inst["deadline"].pop("job_info") # publish job file publish_job = { From bd3010fa93f858e61ca398e113708ac9299cc5c5 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 5 Nov 2024 12:58:50 +0100 Subject: [PATCH 079/153] Fix do not trigger on not farm instances --- .../ayon_deadline/plugins/publish/global/collect_jobinfo.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py index 47969e94b7..1983682a3c 100644 --- a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py +++ b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py @@ -42,6 +42,10 @@ class CollectJobInfo(pyblish.api.InstancePlugin, AYONPyblishPluginMixin): machines_enum_values = [] def process(self, instance): + if not instance.data.get("farm"): + self.log.debug("Should not be processed on farm, skipping.") + return + attr_values = self._get_jobinfo_defaults(instance) attr_values.update(self.get_attr_values_from_data(instance.data)) From 9fcc50ab1d9487e05502677eb1377f4cf49ce0fa Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 6 Nov 2024 15:25:23 +0100 Subject: [PATCH 080/153] Fix showing attributes for not farm instances --- client/ayon_deadline/plugins/publish/global/collect_jobinfo.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py index 1983682a3c..3aa5549941 100644 --- a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py +++ b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py @@ -114,6 +114,9 @@ def apply_settings(cls, project_settings): @classmethod def get_attr_defs_for_instance(cls, create_context, instance): + if instance.product_type not in cls.families: + return [] + host_name = create_context.host_name task_name = instance["task"] From 3b818968a6cf0f8feea0414f81a35af5687586d1 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 6 Nov 2024 15:29:44 +0100 Subject: [PATCH 081/153] Fix typo --- client/ayon_deadline/abstract_submit_deadline.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/client/ayon_deadline/abstract_submit_deadline.py b/client/ayon_deadline/abstract_submit_deadline.py index 0c315ecf95..e416b3200e 100644 --- a/client/ayon_deadline/abstract_submit_deadline.py +++ b/client/ayon_deadline/abstract_submit_deadline.py @@ -122,7 +122,7 @@ def process(self, 
instance): self.log.info("Export job id: %s", job_id) render_job_info = self.get_job_info( job_info=job_info, dependency_job_ids=[job_id]) - render_plugin_info = self.get_plugin_info(ob_type="render") + render_plugin_info = self.get_plugin_info(job_type="render") payload = self.assemble_payload( job_info=render_job_info, plugin_info=render_plugin_info From 3d6778c3fbf3b6f7b6a25903c5cdb74f1557ef1b Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 6 Nov 2024 15:34:49 +0100 Subject: [PATCH 082/153] Removed unnecessary class variables --- .../plugins/publish/fusion/submit_fusion_deadline.py | 5 ----- 1 file changed, 5 deletions(-) diff --git a/client/ayon_deadline/plugins/publish/fusion/submit_fusion_deadline.py b/client/ayon_deadline/plugins/publish/fusion/submit_fusion_deadline.py index ef37539055..2a84934679 100644 --- a/client/ayon_deadline/plugins/publish/fusion/submit_fusion_deadline.py +++ b/client/ayon_deadline/plugins/publish/fusion/submit_fusion_deadline.py @@ -41,11 +41,6 @@ class FusionSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline, # presets plugin = None - priority = 50 - chunk_size = 1 - concurrent_tasks = 1 - group = "" - def process(self, instance): if not instance.data["farm"]: self.log.debug("Render on farm is disabled. " From 73b34005c82b8dfec85379c0e7048a15c0e03b45 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 6 Nov 2024 18:13:45 +0100 Subject: [PATCH 083/153] Transformed Nuke to AbstractSubmitDeadline --- .../publish/nuke/submit_nuke_deadline.py | 563 ++++-------------- 1 file changed, 127 insertions(+), 436 deletions(-) diff --git a/client/ayon_deadline/plugins/publish/nuke/submit_nuke_deadline.py b/client/ayon_deadline/plugins/publish/nuke/submit_nuke_deadline.py index e5bf8a071e..45817d60a4 100644 --- a/client/ayon_deadline/plugins/publish/nuke/submit_nuke_deadline.py +++ b/client/ayon_deadline/plugins/publish/nuke/submit_nuke_deadline.py @@ -1,25 +1,32 @@ import os import re -import json -import getpass -from datetime import datetime +import attr import pyblish.api from ayon_core.pipeline.publish import ( AYONPyblishPluginMixin ) -from ayon_core.lib import ( - is_in_tests, - BoolDef, - NumberDef -) -from ayon_deadline.abstract_submit_deadline import requests_post -from ayon_deadline.lib import get_instance_job_envs, get_ayon_render_job_envs +from ayon_deadline import abstract_submit_deadline + + +@attr.s +class NukePluginInfo: + SceneFile: str = attr.ib(default=None) # Input + Version: str = attr.ib(default=None) # Mandatory for Deadline + # Mandatory for Deadline + ProjectPath: str = attr.ib(default=None) + OutputFilePath: str = attr.ib(default=None) + # Use GPU + UseGpu: bool = attr.ib(default=True) + WriteNode: str = attr.ib(default=None) -class NukeSubmitDeadline(pyblish.api.InstancePlugin, - AYONPyblishPluginMixin): + +class NukeSubmitDeadline( + abstract_submit_deadline.AbstractSubmitDeadline, + AYONPyblishPluginMixin +): """Submit write to Deadline Renders are submitted to a Deadline Web Service as @@ -33,436 +40,150 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin, families = ["render", "prerender"] optional = True targets = ["local"] - settings_category = "deadline" - - # presets - priority = 50 - chunk_size = 1 - concurrent_tasks = 1 - group = "" - department = "" - limit_groups = [] - use_gpu = False - env_allowed_keys = [] - env_search_replace_values = [] - workfile_dependency = True - use_published_workfile = True - - @classmethod - def get_attribute_defs(cls): - return [ - NumberDef( - "priority", - label="Priority", - 
default=cls.priority, - decimals=0 - ), - NumberDef( - "chunk", - label="Frames Per Task", - default=cls.chunk_size, - decimals=0, - minimum=1, - maximum=1000 - ), - NumberDef( - "concurrency", - label="Concurrency", - default=cls.concurrent_tasks, - decimals=0, - minimum=1, - maximum=10 - ), - BoolDef( - "use_gpu", - default=cls.use_gpu, - label="Use GPU" - ), - BoolDef( - "workfile_dependency", - default=cls.workfile_dependency, - label="Workfile Dependency" - ), - BoolDef( - "use_published_workfile", - default=cls.use_published_workfile, - label="Use Published Workfile" - ) - ] def process(self, instance): - if not instance.data.get("farm"): - self.log.debug("Skipping local instance.") - return - instance.data["attributeValues"] = self.get_attr_values_from_data( - instance.data) + """Plugin entry point.""" + self._instance = instance - families = instance.data["families"] - - node = instance.data["transientData"]["node"] context = instance.context + self._deadline_url = instance.data["deadline"]["url"] + assert self._deadline_url, "Requires Deadline Webservice URL" - deadline_url = instance.data["deadline"]["url"] - assert deadline_url, "Requires Deadline Webservice URL" + # adding expected files to instance.data + write_node = instance.data["transientData"]["node"] + render_path = instance.data["path"] + start_frame = int(instance.data["frameStartHandle"]) + end_frame = int(instance.data["frameEndHandle"]) + self._expected_files( + instance, + render_path, + start_frame, + end_frame + ) - self.deadline_url = "{}/api/jobs".format(deadline_url) - self._comment = context.data.get("comment", "") - self._ver = re.search(r"\d+\.\d+", context.data.get("hostVersion")) - self._deadline_user = context.data.get( - "deadlineUser", getpass.getuser()) - submit_frame_start = int(instance.data["frameStartHandle"]) - submit_frame_end = int(instance.data["frameEndHandle"]) + job_info = self.get_generic_job_info(instance) + self.job_info = self.get_job_info(job_info=job_info) - # get output path - render_path = instance.data['path'] - script_path = context.data["currentFile"] + self._set_scene_path( + context.data["currentFile"], job_info.UsePublished) - use_published_workfile = instance.data["attributeValues"].get( - "use_published_workfile", self.use_published_workfile + self.plugin_info = self.get_plugin_info( + scene_path=self.scene_path, + render_path=render_path, + write_node_name=write_node.name() ) - if use_published_workfile: - script_path = self._get_published_workfile_path(context) - # only add main rendering job if target is not frames_farm - r_job_response_json = None + self.aux_files = self.get_aux_files() + + plugin_info_data = instance.data["deadline"]["plugin_info_data"] + if plugin_info_data: + self.apply_additional_plugin_info(plugin_info_data) + if instance.data["render_target"] != "frames_farm": - r_job_response = self.payload_submit( - instance, - script_path, - render_path, - node.name(), - submit_frame_start, - submit_frame_end - ) - r_job_response_json = r_job_response.json() - instance.data["deadlineSubmissionJob"] = r_job_response_json - - # Store output dir for unified publisher (filesequence) + job_id = self.process_submission() + self.log.info("Submitted job to Deadline: {}.".format(job_id)) + + render_path = instance.data["path"] instance.data["outputDir"] = os.path.dirname( render_path).replace("\\", "/") instance.data["publishJobState"] = "Suspended" if instance.data.get("bakingNukeScripts"): for baking_script in instance.data["bakingNukeScripts"]: - render_path = 
baking_script["bakeRenderPath"] - script_path = baking_script["bakeScriptPath"] - exe_node_name = baking_script["bakeWriteNodeName"] - - b_job_response = self.payload_submit( - instance, - script_path, - render_path, - exe_node_name, - submit_frame_start, - submit_frame_end, - r_job_response_json, - baking_submission=True - ) + self.job_info.JobType = "Normal" + self.job_info.ChunkSize = 99999999 - # Store output dir for unified publisher (filesequence) - instance.data["deadlineSubmissionJob"] = b_job_response.json() + response_data = instance.data["deadlineSubmissionJob"] + if response_data.get("_id"): + self.job_info.BatchName = response_data["Props"]["Batch"] + self.job_info.JobDependency0 = response_data["_id"] - instance.data["publishJobState"] = "Suspended" + render_path = baking_script["bakeRenderPath"] + scene_path = baking_script["bakeScriptPath"] + write_node_name = baking_script["bakeWriteNodeName"] + + self.plugin_info = self.get_plugin_info( + scene_path=scene_path, + render_path=render_path, + write_node_name=write_node_name + ) + job_id = self.process_submission() + self.log.info( + "Submitted baking job to Deadline: {}.".format(job_id)) # add to list of job Id if not instance.data.get("bakingSubmissionJobs"): instance.data["bakingSubmissionJobs"] = [] - instance.data["bakingSubmissionJobs"].append( - b_job_response.json()["_id"]) - - # redefinition of families - if "render" in instance.data["productType"]: - instance.data["family"] = "write" - instance.data["productType"] = "write" - families.insert(0, "render2d") - elif "prerender" in instance.data["productType"]: - instance.data["family"] = "write" - instance.data["productType"] = "write" - families.insert(0, "prerender") - instance.data["families"] = families - - def _get_published_workfile_path(self, context): - """This method is temporary while the class is not inherited from - AbstractSubmitDeadline""" - anatomy = context.data["anatomy"] - # WARNING Hardcoded template name 'default' > may not be used - publish_template = anatomy.get_template_item( - "publish", "default", "path" - ) - for instance in context: - if ( - instance.data["productType"] != "workfile" - # Disabled instances won't be integrated - or instance.data("publish") is False - ): - continue - template_data = instance.data["anatomyData"] - # Expect workfile instance has only one representation - representation = instance.data["representations"][0] - # Get workfile extension - repre_file = representation["files"] - self.log.info(repre_file) - ext = os.path.splitext(repre_file)[1].lstrip(".") - - # Fill template data - template_data["representation"] = representation["name"] - template_data["ext"] = ext - template_data["comment"] = None - - template_filled = publish_template.format(template_data) - script_path = os.path.normpath(template_filled) - self.log.info( - "Using published scene for render {}".format( - script_path - ) - ) - return script_path + instance.data["bakingSubmissionJobs"].append(job_id) - return None + def get_job_info(self, job_info=None, **kwargs): + instance = self._instance - def payload_submit( - self, - instance, - script_path, - render_path, - exe_node_name, - start_frame, - end_frame, - response_data=None, - baking_submission=False, - ): - """Submit payload to Deadline - - Args: - instance (pyblish.api.Instance): pyblish instance - script_path (str): path to nuke script - render_path (str): path to rendered images - exe_node_name (str): name of the node to render - start_frame (int): start frame - end_frame (int): end frame - 
response_data Optional[dict]: response data from - previous submission - baking_submission Optional[bool]: if it's baking submission - - Returns: - requests.Response - """ - render_dir = os.path.normpath(os.path.dirname(render_path)) - - # batch name - src_filepath = instance.context.data["currentFile"] - batch_name = os.path.basename(src_filepath) - job_name = os.path.basename(render_path) - - if is_in_tests(): - batch_name += datetime.now().strftime("%d%m%Y%H%M%S") - - output_filename_0 = self.preview_fname(render_path) - - if not response_data: - response_data = {} - - try: - # Ensure render folder exists - os.makedirs(render_dir) - except OSError: - pass - - # resolve any limit groups - limit_groups = self.get_limit_groups() - self.log.debug("Limit groups: `{}`".format(limit_groups)) - - payload = { - "JobInfo": { - # Top-level group name - "BatchName": batch_name, - - # Job name, as seen in Monitor - "Name": job_name, - - # Arbitrary username, for visualisation in Monitor - "UserName": self._deadline_user, - - "Priority": instance.data["attributeValues"].get( - "priority", self.priority), - "ChunkSize": instance.data["attributeValues"].get( - "chunk", self.chunk_size), - "ConcurrentTasks": instance.data["attributeValues"].get( - "concurrency", - self.concurrent_tasks - ), - - "Department": self.department, - - "Pool": instance.data.get("primaryPool"), - "SecondaryPool": instance.data.get("secondaryPool"), - "Group": self.group, - - "Plugin": "Nuke", - "Frames": "{start}-{end}".format( - start=start_frame, - end=end_frame - ), - "Comment": self._comment, - - # Optional, enable double-click to preview rendered - # frames from Deadline Monitor - "OutputFilename0": output_filename_0.replace("\\", "/"), - - # limiting groups - "LimitGroups": ",".join(limit_groups) - - }, - "PluginInfo": { - # Input - "SceneFile": script_path, - - # Output directory and filename - "OutputFilePath": render_dir.replace("\\", "/"), - # "OutputFilePrefix": render_variables["filename_prefix"], - - # Mandatory for Deadline - "Version": self._ver.group(), - - # Resolve relative references - "ProjectPath": script_path, - "AWSAssetFile0": render_path, - - # using GPU by default - "UseGpu": instance.data["attributeValues"].get( - "use_gpu", self.use_gpu), - - # Only the specific write node is rendered. - "WriteNode": exe_node_name - }, - - # Mandatory for Deadline, may be empty - "AuxFiles": [] - } - - # Add workfile dependency. 
- workfile_dependency = instance.data["attributeValues"].get( - "workfile_dependency", self.workfile_dependency - ) - if workfile_dependency: - payload["JobInfo"].update({"AssetDependency0": script_path}) - - # TODO: rewrite for baking with sequences - if baking_submission: - payload["JobInfo"].update({ - "JobType": "Normal", - "ChunkSize": 99999999 - }) - - if response_data.get("_id"): - payload["JobInfo"].update({ - "BatchName": response_data["Props"]["Batch"], - "JobDependency0": response_data["_id"], - }) - - # Include critical environment variables with submission - keys = [ - "NUKE_PATH", - "FOUNDRY_LICENSE" - ] - - # add allowed keys from preset if any - if self.env_allowed_keys: - keys += self.env_allowed_keys - - nuke_specific_env = { - key: os.environ[key] - for key in keys - if key in os.environ - } - - # Set job environment variables - environment = get_instance_job_envs(instance) - environment.update(get_ayon_render_job_envs()) - environment.update(nuke_specific_env) - - # finally search replace in values of any key - if self.env_search_replace_values: - for key, value in environment.items(): - for item in self.env_search_replace_values: - environment[key] = value.replace( - item["name"], item["value"] - ) - - payload["JobInfo"].update({ - "EnvironmentKeyValue%d" % index: "{key}={value}".format( - key=key, - value=environment[key] - ) for index, key in enumerate(environment) - }) - - plugin = payload["JobInfo"]["Plugin"] - self.log.debug("using render plugin : {}".format(plugin)) - - self.log.debug("Submitting..") - self.log.debug(json.dumps(payload, indent=4, sort_keys=True)) + job_info.Plugin = "Nuke" - # adding expected files to instance.data - self.expected_files( - instance, - render_path, - start_frame, - end_frame + start_frame = int(instance.data["frameStartHandle"]) + end_frame = int(instance.data["frameEndHandle"]) + job_info.Frames = "{start}-{end}".format( + start=start_frame, + end=end_frame ) - self.log.debug("__ expectedFiles: `{}`".format( - instance.data["expectedFiles"])) - auth = instance.data["deadline"]["auth"] - verify = instance.data["deadline"]["verify"] - response = requests_post(self.deadline_url, - json=payload, - timeout=10, - auth=auth, - verify=verify) - - if not response.ok: - raise Exception(response.text) + limit_groups = self._get_limit_groups(job_info.LimitGroups or []) + job_info.LimitGroups = limit_groups - return response + return job_info - def preflight_check(self, instance): - """Ensure the startFrame, endFrame and byFrameStep are integers""" - - for key in ("frameStart", "frameEnd"): - value = instance.data[key] - - if int(value) == value: - continue - - self.log.warning( - "%f=%d was rounded off to nearest integer" - % (value, int(value)) - ) + def get_plugin_info( + self, scene_path=None, render_path=None, write_node_name=None): + instance = self._instance + context = instance.context + version = re.search(r"\d+\.\d+", context.data.get("hostVersion")) + + render_dir = os.path.dirname(render_path) + plugin_info = NukePluginInfo( + SceneFile=scene_path, + Version=version.group(), + OutputFilePath=render_dir.replace("\\", "/"), + ProjectPath=scene_path, + UseGpu=True, + WriteNode=write_node_name + ) - def preview_fname(self, path): - """Return output file path with #### for padding. + plugin_payload: dict = attr.asdict(plugin_info) + return plugin_payload - Deadline requires the path to be formatted with # in place of numbers. 
- For example `/path/to/render.####.png` + def _get_limit_groups(self, limit_groups): + """Search for limit group nodes and return group name. + Limit groups will be defined as pairs in Nuke deadline submitter + presents where the key will be name of limit group and value will be + a list of plugin's node class names. Thus, when a plugin uses more + than one node, these will be captured and the triggered process + will add the appropriate limit group to the payload jobinfo attributes. + Returning: + list: captured groups list + """ + # Not all hosts can import this module. + import nuke - Args: - path (str): path to rendered images + captured_groups = [] + for limit_group in limit_groups: + lg_name = limit_group["name"] - Returns: - str + for node_class in limit_group["value"]: + for node in nuke.allNodes(recurseGroups=True): + # ignore all nodes not member of defined class + if node.Class() not in node_class: + continue + # ignore all disabled nodes + if node["disable"].value(): + continue + # add group name if not already added + if lg_name not in captured_groups: + captured_groups.append(lg_name) + return captured_groups - """ - self.log.debug("_ path: `{}`".format(path)) - if "%" in path: - search_results = re.search(r"(%0)(\d)(d.)", path).groups() - self.log.debug("_ search_results: `{}`".format(search_results)) - return int(search_results[1]) - if "#" in path: - self.log.debug("_ path: `{}`".format(path)) - return path - - def expected_files( + def _expected_files( self, instance, filepath, @@ -515,33 +236,3 @@ def expected_files( for i in range(start_frame, (end_frame + 1)): instance.data["expectedFiles"].append( os.path.join(dirname, (file % i)).replace("\\", "/")) - - def get_limit_groups(self): - """Search for limit group nodes and return group name. - Limit groups will be defined as pairs in Nuke deadline submitter - presents where the key will be name of limit group and value will be - a list of plugin's node class names. Thus, when a plugin uses more - than one node, these will be captured and the triggered process - will add the appropriate limit group to the payload jobinfo attributes. - Returning: - list: captured groups list - """ - # Not all hosts can import this module. - import nuke - - captured_groups = [] - for limit_group in self.limit_groups: - lg_name = limit_group["name"] - - for node_class in limit_group["value"]: - for node in nuke.allNodes(recurseGroups=True): - # ignore all nodes not member of defined class - if node.Class() not in node_class: - continue - # ignore all disabled nodes - if node["disable"].value(): - continue - # add group name if not already added - if lg_name not in captured_groups: - captured_groups.append(lg_name) - return captured_groups From 19639fb2fc46ca6d2fa3899d47b6ce9fa8384e84 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 6 Nov 2024 18:30:56 +0100 Subject: [PATCH 084/153] Transformed CelAction to AbstractSubmitDeadline Untested! 
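
A minimal, self-contained sketch of the attrs-based PluginInfo pattern this
submitter now follows (illustrative values only; the real class, defaults and
the get_plugin_info() call that serializes it are in the diff below):

    import attr

    @attr.s
    class CelactionPluginInfo():
        SceneFile = attr.ib(default=None)
        OutputFilePath = attr.ib(default=None)

    # get_plugin_info() fills the container and hands Deadline a plain dict
    # via attr.asdict(); the paths here are hypothetical examples.
    info = CelactionPluginInfo(
        SceneFile="/tmp/scene.scn",
        OutputFilePath="/tmp/renders",
    )
    print(attr.asdict(info))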
--- .../celaction/submit_celaction_deadline.py | 244 +++--------------- 1 file changed, 40 insertions(+), 204 deletions(-) diff --git a/client/ayon_deadline/plugins/publish/celaction/submit_celaction_deadline.py b/client/ayon_deadline/plugins/publish/celaction/submit_celaction_deadline.py index e9313e3f2f..7c77fc127f 100644 --- a/client/ayon_deadline/plugins/publish/celaction/submit_celaction_deadline.py +++ b/client/ayon_deadline/plugins/publish/celaction/submit_celaction_deadline.py @@ -1,13 +1,23 @@ import os import re -import json -import getpass import pyblish.api +import attr -from ayon_deadline.abstract_submit_deadline import requests_post +from ayon_deadline import abstract_submit_deadline -class CelactionSubmitDeadline(pyblish.api.InstancePlugin): +@attr.s +class CelactionPluginInfo(): + SceneFile = attr.ib(default=None) + OutputFilePath = attr.ib(default=None) + Output = attr.ib(default=None) + StartupDirectory = attr.ib(default=None) + Arguments = attr.ib(default=None) + ProjectPath = attr.ib(default=None) + AWSAssetFile0 = attr.ib(default=None) + + +class CelactionSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): """Submit CelAction2D scene to Deadline Renders are submitted to a Deadline Web Service. @@ -18,99 +28,37 @@ class CelactionSubmitDeadline(pyblish.api.InstancePlugin): order = pyblish.api.IntegratorOrder + 0.1 hosts = ["celaction"] families = ["render.farm"] - settings_category = "deadline" - - deadline_department = "" - deadline_priority = 50 - deadline_pool = "" - deadline_pool_secondary = "" - deadline_group = "" - deadline_chunk_size = 1 - deadline_job_delay = "00:00:08:00" - def process(self, instance): + def get_job_info(self, job_info=None): + job_info.Plugin = "CelAction" - context = instance.context + # already collected explicit values for rendered Frames + if not job_info.Frames: + # Deadline requires integers in frame range + frame_range = "{}-{}".format( + int(round(self._instance.data["frameStart"])), + int(round(self._instance.data["frameEnd"]))) + job_info.Frames = frame_range - deadline_url = instance.data["deadline"]["url"] - assert deadline_url, "Requires Deadline Webservice URL" + return job_info - self.deadline_url = "{}/api/jobs".format(deadline_url) - self._comment = instance.data["comment"] - self._deadline_user = context.data.get( - "deadlineUser", getpass.getuser()) - self._frame_start = int(instance.data["frameStart"]) - self._frame_end = int(instance.data["frameEnd"]) + def get_plugin_info(self): + plugin_info = CelactionPluginInfo() + instance = self._instance - # get output path - render_path = instance.data['path'] - script_path = context.data["currentFile"] + render_path = instance.data["path"] + render_dir = os.path.dirname(render_path) - response = self.payload_submit(instance, - script_path, - render_path - ) - # Store output dir for unified publisher (filesequence) - instance.data["deadlineSubmissionJob"] = response.json() + self._expected_files(instance, render_path) - instance.data["outputDir"] = os.path.dirname( - render_path).replace("\\", "/") - - instance.data["publishJobState"] = "Suspended" - - # adding 2d render specific family for version identification in Loader - instance.data["families"] = ["render2d"] + script_path = self.scene_path + plugin_info.SceneFile = script_path + plugin_info.ProjectPath = script_path + plugin_info.OutputFilePath = render_dir.replace("\\", "/") + plugin_info.StartupDirectory = "" - def payload_submit(self, - instance, - script_path, - render_path - ): resolution_width = 
instance.data["resolutionWidth"] resolution_height = instance.data["resolutionHeight"] - render_dir = os.path.normpath(os.path.dirname(render_path)) - render_path = os.path.normpath(render_path) - script_name = os.path.basename(script_path) - - anatomy = instance.context.data["anatomy"] - publish_template = anatomy.get_template_item( - "publish", "default", "path" - ) - for item in instance.context: - if "workfile" in item.data["productType"]: - msg = "Workfile (scene) must be published along" - assert item.data["publish"] is True, msg - - template_data = item.data.get("anatomyData") - rep = item.data.get("representations")[0].get("name") - template_data["representation"] = rep - template_data["ext"] = rep - template_data["comment"] = None - template_filled = publish_template.format_strict( - template_data - ) - script_path = os.path.normpath(template_filled) - - self.log.info( - "Using published scene for render {}".format(script_path) - ) - - jobname = "%s - %s" % (script_name, instance.name) - - output_filename_0 = self.preview_fname(render_path) - - try: - # Ensure render folder exists - os.makedirs(render_dir) - except OSError: - pass - - # define chunk and priority - chunk_size = instance.context.data.get("chunk") - if not chunk_size: - chunk_size = self.deadline_chunk_size - - # search for %02d pattern in name, and padding number search_results = re.search(r"(%0)(\d)(d)[._]", render_path).groups() split_patern = "".join(search_results) padding_number = int(search_results[1]) @@ -128,126 +76,14 @@ def payload_submit(self, f"-= AbsoluteFrameNumber=on -= PadDigits={padding_number}", "-= ClearAttachment=on", ] + plugin_info.Arguments = " ".join(args) - payload = { - "JobInfo": { - # Job name, as seen in Monitor - "Name": jobname, - - # plugin definition - "Plugin": "CelAction", - - # Top-level group name - "BatchName": script_name, - - # Arbitrary username, for visualisation in Monitor - "UserName": self._deadline_user, - - "Department": self.deadline_department, - "Priority": self.deadline_priority, - - "Group": self.deadline_group, - "Pool": self.deadline_pool, - "SecondaryPool": self.deadline_pool_secondary, - "ChunkSize": chunk_size, - - "Frames": f"{self._frame_start}-{self._frame_end}", - "Comment": self._comment, - - # Optional, enable double-click to preview rendered - # frames from Deadline Monitor - "OutputFilename0": output_filename_0.replace("\\", "/"), - - # # Asset dependency to wait for at least - # the scene file to sync. 
- # "AssetDependency0": script_path - "ScheduledType": "Once", - "JobDelay": self.deadline_job_delay - }, - "PluginInfo": { - # Input - "SceneFile": script_path, - - # Output directory - "OutputFilePath": render_dir.replace("\\", "/"), - - # Plugin attributes - "StartupDirectory": "", - "Arguments": " ".join(args), - - # Resolve relative references - "ProjectPath": script_path, - "AWSAssetFile0": render_path, - }, - - # Mandatory for Deadline, may be empty - "AuxFiles": [] - } - - plugin = payload["JobInfo"]["Plugin"] - self.log.debug("using render plugin : {}".format(plugin)) - - self.log.debug("Submitting..") - self.log.debug(json.dumps(payload, indent=4, sort_keys=True)) - - # adding expectied files to instance.data - self.expected_files(instance, render_path) - self.log.debug("__ expectedFiles: `{}`".format( - instance.data["expectedFiles"])) - auth = instance.data["deadline"]["auth"] - verify = instance.data["deadline"]["verify"] - response = requests_post(self.deadline_url, json=payload, - auth=auth, - verify=verify) - - if not response.ok: - self.log.error( - "Submission failed! [{}] {}".format( - response.status_code, response.content)) - self.log.debug(payload) - raise SystemExit(response.text) - - return response - - def preflight_check(self, instance): - """Ensure the startFrame, endFrame and byFrameStep are integers""" - - for key in ("frameStart", "frameEnd"): - value = instance.data[key] - - if int(value) == value: - continue - - self.log.warning( - "%f=%d was rounded off to nearest integer" - % (value, int(value)) - ) - - def preview_fname(self, path): - """Return output file path with #### for padding. - - Deadline requires the path to be formatted with # in place of numbers. - For example `/path/to/render.####.png` - - Args: - path (str): path to rendered images - - Returns: - str - - """ - self.log.debug("_ path: `{}`".format(path)) - if "%" in path: - search_results = re.search(r"[._](%0)(\d)(d)[._]", path).groups() - split_patern = "".join(search_results) - split_path = path.split(split_patern) - hashes = "#" * int(search_results[1]) - return "".join([split_path[0], hashes, split_path[-1]]) + # adding 2d render specific family for version identification in Loader + instance.data["families"] = ["render2d"] - self.log.debug("_ path: `{}`".format(path)) - return path + return attr.asdict(plugin_info) - def expected_files(self, instance, filepath): + def _expected_files(self, instance, filepath): """ Create expected files in instance data """ if not instance.data.get("expectedFiles"): From 389fef8665dcbdae8989172a360fd7cf7d893d56 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 6 Nov 2024 19:02:01 +0100 Subject: [PATCH 085/153] Replaced attr with dataclass --- .../submit_aftereffects_deadline.py | 26 +++---- .../blender/submit_blender_deadline.py | 14 ++-- .../celaction/submit_celaction_deadline.py | 22 +++--- .../publish/fusion/submit_fusion_deadline.py | 17 ++-- .../houdini/submit_houdini_cache_deadline.py | 26 +++---- .../houdini/submit_houdini_render_deadline.py | 78 +++++++++---------- .../publish/max/submit_max_deadline.py | 14 ++-- .../publish/maya/submit_maya_deadline.py | 63 ++++++++------- .../publish/nuke/submit_nuke_deadline.py | 21 +++-- .../publish/unreal/submit_unreal_deadline.py | 32 ++++---- 10 files changed, 154 insertions(+), 159 deletions(-) diff --git a/client/ayon_deadline/plugins/publish/aftereffects/submit_aftereffects_deadline.py b/client/ayon_deadline/plugins/publish/aftereffects/submit_aftereffects_deadline.py index b3e638dd8c..66eb578117 
100644 --- a/client/ayon_deadline/plugins/publish/aftereffects/submit_aftereffects_deadline.py +++ b/client/ayon_deadline/plugins/publish/aftereffects/submit_aftereffects_deadline.py @@ -1,6 +1,6 @@ import os -import attr import pyblish.api +from dataclasses import dataclass, field, asdict from ayon_core.lib import ( env_value_to_bool, @@ -9,18 +9,18 @@ from ayon_deadline import abstract_submit_deadline -@attr.s +@dataclass class DeadlinePluginInfo(): - Comp = attr.ib(default=None) - SceneFile = attr.ib(default=None) - OutputFilePath = attr.ib(default=None) - Output = attr.ib(default=None) - StartupDirectory = attr.ib(default=None) - Arguments = attr.ib(default=None) - ProjectPath = attr.ib(default=None) - AWSAssetFile0 = attr.ib(default=None) - Version = attr.ib(default=None) - MultiProcess = attr.ib(default=None) + Comp: str = field(default=None) + SceneFile: str = field(default=None) + OutputFilePath: str = field(default=None) + Output: str = field(default=None) + StartupDirectory: str = field(default=None) + Arguments: str = field(default=None) + ProjectPath: str = field(default=None) + AWSAssetFile0: str = field(default=None) + Version: str = field(default=None) + MultiProcess: str = field(default=None) class AfterEffectsSubmitDeadline( @@ -73,7 +73,7 @@ def get_plugin_info(self): deadline_plugin_info.SceneFile = self.scene_path deadline_plugin_info.Output = render_path.replace("\\", "/") - return attr.asdict(deadline_plugin_info) + return asdict(deadline_plugin_info) def from_published_scene(self): """ Do not overwrite expected files. diff --git a/client/ayon_deadline/plugins/publish/blender/submit_blender_deadline.py b/client/ayon_deadline/plugins/publish/blender/submit_blender_deadline.py index 1d4c943931..69733ae93c 100644 --- a/client/ayon_deadline/plugins/publish/blender/submit_blender_deadline.py +++ b/client/ayon_deadline/plugins/publish/blender/submit_blender_deadline.py @@ -2,7 +2,7 @@ """Submitting render job to Deadline.""" import os -import attr +from dataclasses import dataclass, field, asdict from ayon_core.pipeline.publish import AYONPyblishPluginMixin from ayon_core.pipeline.farm.tools import iter_expected_files @@ -10,11 +10,11 @@ from ayon_deadline import abstract_submit_deadline -@attr.s -class BlenderPluginInfo(): - SceneFile = attr.ib(default=None) # Input - Version = attr.ib(default=None) # Mandatory for Deadline - SaveFile = attr.ib(default=True) +@dataclass +class BlenderPluginInfo: + SceneFile: str = field(default=None) # Input + Version: str = field(default=None) # Mandatory for Deadline + SaveFile: bool = field(default=True) class BlenderSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline, @@ -48,7 +48,7 @@ def get_plugin_info(self): SaveFile=True, ) - plugin_payload = attr.asdict(plugin_info) + plugin_payload = asdict(plugin_info) return plugin_payload diff --git a/client/ayon_deadline/plugins/publish/celaction/submit_celaction_deadline.py b/client/ayon_deadline/plugins/publish/celaction/submit_celaction_deadline.py index 7c77fc127f..c11773d1f6 100644 --- a/client/ayon_deadline/plugins/publish/celaction/submit_celaction_deadline.py +++ b/client/ayon_deadline/plugins/publish/celaction/submit_celaction_deadline.py @@ -1,20 +1,20 @@ import os import re import pyblish.api -import attr +from dataclasses import dataclass, field, asdict from ayon_deadline import abstract_submit_deadline -@attr.s -class CelactionPluginInfo(): - SceneFile = attr.ib(default=None) - OutputFilePath = attr.ib(default=None) - Output = attr.ib(default=None) - StartupDirectory 
= attr.ib(default=None) - Arguments = attr.ib(default=None) - ProjectPath = attr.ib(default=None) - AWSAssetFile0 = attr.ib(default=None) +@dataclass +class CelactionPluginInfo: + SceneFile: str = field(default=None) + OutputFilePath: str = field(default=None) + Output: str = field(default=None) + StartupDirectory: str = field(default=None) + Arguments: str = field(default=None) + ProjectPath: str = field(default=None) + AWSAssetFile0: str = field(default=None) class CelactionSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline): @@ -81,7 +81,7 @@ def get_plugin_info(self): # adding 2d render specific family for version identification in Loader instance.data["families"] = ["render2d"] - return attr.asdict(plugin_info) + return asdict(plugin_info) def _expected_files(self, instance, filepath): """ Create expected files in instance data diff --git a/client/ayon_deadline/plugins/publish/fusion/submit_fusion_deadline.py b/client/ayon_deadline/plugins/publish/fusion/submit_fusion_deadline.py index 2a84934679..b925bd1339 100644 --- a/client/ayon_deadline/plugins/publish/fusion/submit_fusion_deadline.py +++ b/client/ayon_deadline/plugins/publish/fusion/submit_fusion_deadline.py @@ -1,26 +1,25 @@ import os - +from dataclasses import dataclass, field, asdict import pyblish.api -import attr from ayon_core.pipeline.publish import AYONPyblishPluginMixin from ayon_core.pipeline.farm.tools import iter_expected_files from ayon_deadline import abstract_submit_deadline -@attr.s +@dataclass class FusionPluginInfo: - FlowFile: str = attr.ib(default=None) # Input - Version: str = attr.ib(default=None) # Mandatory for Deadline + FlowFile: str = field(default=None) # Input + Version: str = field(default=None) # Mandatory for Deadline # Render in high quality - HighQuality: bool = attr.ib(default=True) + HighQuality: bool = field(default=True) # Whether saver output should be checked after rendering # is complete - CheckOutput: bool = attr.ib(default=True) + CheckOutput: bool = field(default=True) # Proxy: higher numbers smaller images for faster test renders # 1 = no proxy quality - Proxy: int = attr.ib(default=1) + Proxy: int = field(default=1) class FusionSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline, @@ -124,5 +123,5 @@ def get_plugin_info(self): FlowFile=self.scene_path, Version=str(instance.data["app_version"]), ) - plugin_payload: dict = attr.asdict(plugin_info) + plugin_payload: dict = asdict(plugin_info) return plugin_payload diff --git a/client/ayon_deadline/plugins/publish/houdini/submit_houdini_cache_deadline.py b/client/ayon_deadline/plugins/publish/houdini/submit_houdini_cache_deadline.py index 1b5281d16b..3dafeed546 100644 --- a/client/ayon_deadline/plugins/publish/houdini/submit_houdini_cache_deadline.py +++ b/client/ayon_deadline/plugins/publish/houdini/submit_houdini_cache_deadline.py @@ -1,7 +1,7 @@ import os from datetime import datetime -import attr +from dataclasses import dataclass, field, asdict import pyblish.api from ayon_core.lib import ( is_in_tests, @@ -12,17 +12,17 @@ from ayon_deadline import abstract_submit_deadline -@attr.s -class HoudiniPluginInfo(object): - Build = attr.ib(default=None) - IgnoreInputs = attr.ib(default=True) - ScriptJob = attr.ib(default=True) - SceneFile = attr.ib(default=None) # Input - SaveFile = attr.ib(default=True) - ScriptFilename = attr.ib(default=None) - OutputDriver = attr.ib(default=None) - Version = attr.ib(default=None) # Mandatory for Deadline - ProjectPath = attr.ib(default=None) +@dataclass +class HoudiniPluginInfo: + 
Build: str = field(default=None) + IgnoreInputs: bool = field(default=True) + ScriptJob: bool = field(default=True) + SceneFile: bool = field(default=None) # Input + SaveFile: bool = field(default=True) + ScriptFilename: str = field(default=None) + OutputDriver: str = field(default=None) + Version: str = field(default=None) # Mandatory for Deadline + ProjectPath: str = field(default=None) class HoudiniCacheSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline, # noqa @@ -93,7 +93,7 @@ def get_plugin_info(self): ProjectPath=os.path.dirname(self.scene_path) ) - plugin_payload = attr.asdict(plugin_info) + plugin_payload = asdict(plugin_info) return plugin_payload diff --git a/client/ayon_deadline/plugins/publish/houdini/submit_houdini_render_deadline.py b/client/ayon_deadline/plugins/publish/houdini/submit_houdini_render_deadline.py index 0e8997313f..ef9bca7353 100644 --- a/client/ayon_deadline/plugins/publish/houdini/submit_houdini_render_deadline.py +++ b/client/ayon_deadline/plugins/publish/houdini/submit_houdini_render_deadline.py @@ -1,5 +1,5 @@ import os -import attr +from dataclasses import dataclass, field, asdict from datetime import datetime import pyblish.api @@ -13,63 +13,63 @@ from ayon_deadline import abstract_submit_deadline -@attr.s -class DeadlinePluginInfo(): - SceneFile = attr.ib(default=None) - OutputDriver = attr.ib(default=None) - Version = attr.ib(default=None) - IgnoreInputs = attr.ib(default=True) +@dataclass +class DeadlinePluginInfo: + SceneFile: str = field(default=None) + OutputDriver: str = field(default=None) + Version: str = field(default=None) + IgnoreInputs: bool = field(default=True) -@attr.s -class ArnoldRenderDeadlinePluginInfo(): - InputFile = attr.ib(default=None) - Verbose = attr.ib(default=4) +@dataclass +class ArnoldRenderDeadlinePluginInfo: + InputFile: str = field(default=None) + Verbose: int = field(default=4) -@attr.s -class MantraRenderDeadlinePluginInfo(): - SceneFile = attr.ib(default=None) - Version = attr.ib(default=None) +@dataclass +class MantraRenderDeadlinePluginInfo: + SceneFile: str = field(default=None) + Version: str = field(default=None) -@attr.s -class VrayRenderPluginInfo(): - InputFilename = attr.ib(default=None) - SeparateFilesPerFrame = attr.ib(default=True) +@dataclass +class VrayRenderPluginInfo: + InputFilename: str = field(default=None) + SeparateFilesPerFrame: bool = field(default=True) -@attr.s -class RedshiftRenderPluginInfo(): - SceneFile = attr.ib(default=None) +@dataclass +class RedshiftRenderPluginInfo: + SceneFile: str = field(default=None) # Use "1" as the default Redshift version just because it # default fallback version in Deadline's Redshift plugin # if no version was specified - Version = attr.ib(default="1") + Version: str = field(default="1") -@attr.s -class HuskStandalonePluginInfo(): +@dataclass +class HuskStandalonePluginInfo: """Requires Deadline Husk Standalone Plugin. 
See Deadline Plug-in: https://github.com/BigRoy/HuskStandaloneSubmitter Also see Husk options here: https://www.sidefx.com/docs/houdini/ref/utils/husk.html """ - SceneFile = attr.ib() + SceneFile: str = field() # TODO: Below parameters are only supported by custom version of the plugin - Renderer = attr.ib(default=None) - RenderSettings = attr.ib(default="/Render/rendersettings") - Purpose = attr.ib(default="geometry,render") - Complexity = attr.ib(default="veryhigh") - Snapshot = attr.ib(default=-1) - LogLevel = attr.ib(default="2") - PreRender = attr.ib(default="") - PreFrame = attr.ib(default="") - PostFrame = attr.ib(default="") - PostRender = attr.ib(default="") - RestartDelegate = attr.ib(default="") - Version = attr.ib(default="") + Renderer: str = field(default=None) + RenderSettings: str = field(default="/Render/rendersettings") + Purpose: str = field(default="geometry,render") + Complexity: str = field(default="veryhigh") + Snapshot: int = field(default=-1) + LogLevel: str = field(default="2") + PreRender: str = field(default="") + PreFrame: str = field(default="") + PostFrame: str = field(default="") + PostRender: str = field(default="") + RestartDelegate: str = field(default="") + Version: str = field(default="") class HoudiniSubmitDeadline( @@ -292,7 +292,7 @@ def get_plugin_info(self, job_type=None): IgnoreInputs=True ) - return attr.asdict(plugin_info) + return asdict(plugin_info) def process(self, instance): if not instance.data["farm"]: diff --git a/client/ayon_deadline/plugins/publish/max/submit_max_deadline.py b/client/ayon_deadline/plugins/publish/max/submit_max_deadline.py index 0739da0b93..f2e20322b0 100644 --- a/client/ayon_deadline/plugins/publish/max/submit_max_deadline.py +++ b/client/ayon_deadline/plugins/publish/max/submit_max_deadline.py @@ -1,6 +1,6 @@ import os import copy -import attr +from dataclasses import dataclass, field, asdict from ayon_core.pipeline import ( AYONPyblishPluginMixin @@ -17,12 +17,12 @@ from ayon_deadline import abstract_submit_deadline -@attr.s +@dataclass class MaxPluginInfo(object): - SceneFile = attr.ib(default=None) # Input - Version = attr.ib(default=None) # Mandatory for Deadline - SaveFile = attr.ib(default=True) - IgnoreInputs = attr.ib(default=True) + SceneFile: str = field(default=None) # Input + Version: str = field(default=None) # Mandatory for Deadline + SaveFile: bool = field(default=True) + IgnoreInputs: bool = field(default=True) class MaxSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline, @@ -64,7 +64,7 @@ def get_plugin_info(self): IgnoreInputs=True ) - plugin_payload = attr.asdict(plugin_info) + plugin_payload = asdict(plugin_info) return plugin_payload diff --git a/client/ayon_deadline/plugins/publish/maya/submit_maya_deadline.py b/client/ayon_deadline/plugins/publish/maya/submit_maya_deadline.py index c2c06d4491..c23bb64686 100644 --- a/client/ayon_deadline/plugins/publish/maya/submit_maya_deadline.py +++ b/client/ayon_deadline/plugins/publish/maya/submit_maya_deadline.py @@ -24,8 +24,7 @@ from datetime import datetime import itertools from collections import OrderedDict - -import attr +from dataclasses import dataclass, field, asdict from ayon_core.pipeline import ( AYONPyblishPluginMixin @@ -51,44 +50,44 @@ def _validate_deadline_bool_value(instance, attribute, value): ) -@attr.s +@dataclass class MayaPluginInfo(object): - SceneFile = attr.ib(default=None) # Input - OutputFilePath = attr.ib(default=None) # Output directory and filename - OutputFilePrefix = attr.ib(default=None) - Version = 
attr.ib(default=None) # Mandatory for Deadline - UsingRenderLayers = attr.ib(default=True) - RenderLayer = attr.ib(default=None) # Render only this layer - Renderer = attr.ib(default=None) - ProjectPath = attr.ib(default=None) # Resolve relative references + SceneFile: str = field(default=None) # Input + OutputFilePath: str = field(default=None) # Output directory and filename + OutputFilePrefix: str = field(default=None) + Version: str = field(default=None) # Mandatory for Deadline + UsingRenderLayers: bool = field(default=True) + RenderLayer: str = field(default=None) # Render only this layer + Renderer: str = field(default=None) + ProjectPath: str = field(default=None) # Resolve relative references # Include all lights flag - RenderSetupIncludeLights = attr.ib( + RenderSetupIncludeLights: str = field( default="1", validator=_validate_deadline_bool_value) - StrictErrorChecking = attr.ib(default=True) + StrictErrorChecking: bool = field(default=True) -@attr.s +@dataclass class PythonPluginInfo(object): - ScriptFile = attr.ib() - Version = attr.ib(default="3.6") - Arguments = attr.ib(default=None) - SingleFrameOnly = attr.ib(default=None) + ScriptFile: str = field() + Version: str = field(default="3.6") + Arguments: str = field(default=None) + SingleFrameOnly: str = field(default=None) -@attr.s +@dataclass class VRayPluginInfo(object): - InputFilename = attr.ib(default=None) # Input - SeparateFilesPerFrame = attr.ib(default=None) - VRayEngine = attr.ib(default="V-Ray") - Width = attr.ib(default=None) - Height = attr.ib(default=None) # Mandatory for Deadline - OutputFilePath = attr.ib(default=True) - OutputFileName = attr.ib(default=None) # Render only this layer + InputFilename: str = field(default=None) # Input + SeparateFilesPerFrame: str = field(default=None) + VRayEngine: str = field(default="V-Ray") + Width: str = field(default=None) + Height: str = field(default=None) # Mandatory for Deadline + OutputFilePath: str = field(default=None) + OutputFileName: str = field(default=None) # Render only this layer -@attr.s +@dataclass class ArnoldPluginInfo(object): - ArnoldFile = attr.ib(default=None) + ArnoldFile: str = field(default=None) class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline, @@ -153,7 +152,7 @@ def get_plugin_info(self): StrictErrorChecking=strict_error_checking ) - plugin_payload = attr.asdict(plugin_info) + plugin_payload = asdict(plugin_info) return plugin_payload @@ -508,7 +507,7 @@ def _get_vray_export_payload(self, data): "OutputFilePath": os.path.dirname(vray_scene) } - return job_info, attr.asdict(plugin_info) + return job_info, asdict(plugin_info) def _get_vray_render_payload(self, data): @@ -529,7 +528,7 @@ def _get_vray_render_payload(self, data): OutputFileName=job_info.OutputFilename[0] ) - return job_info, attr.asdict(plugin_info) + return job_info, asdict(plugin_info) def _get_arnold_render_payload(self, data): # Job Info @@ -546,7 +545,7 @@ def _get_arnold_render_payload(self, data): ArnoldFile=ass_filepath ) - return job_info, attr.asdict(plugin_info) + return job_info, asdict(plugin_info) def format_vray_output_filename(self): """Format the expected output file of the Export job. 
diff --git a/client/ayon_deadline/plugins/publish/nuke/submit_nuke_deadline.py b/client/ayon_deadline/plugins/publish/nuke/submit_nuke_deadline.py index 45817d60a4..c7b8b303e4 100644 --- a/client/ayon_deadline/plugins/publish/nuke/submit_nuke_deadline.py +++ b/client/ayon_deadline/plugins/publish/nuke/submit_nuke_deadline.py @@ -1,6 +1,6 @@ import os import re -import attr +from dataclasses import dataclass, field, asdict import pyblish.api @@ -10,17 +10,14 @@ from ayon_deadline import abstract_submit_deadline -@attr.s +@dataclass class NukePluginInfo: - SceneFile: str = attr.ib(default=None) # Input - Version: str = attr.ib(default=None) # Mandatory for Deadline - # Mandatory for Deadline - - ProjectPath: str = attr.ib(default=None) - OutputFilePath: str = attr.ib(default=None) - # Use GPU - UseGpu: bool = attr.ib(default=True) - WriteNode: str = attr.ib(default=None) + SceneFile: str = field(default=None) # Input + Version: str = field(default=None) # Mandatory for Deadline + ProjectPath: str = field(default=None) + OutputFilePath: str = field(default=None) + UseGpu: bool = field(default=True) + WriteNode: str = field(default=None) class NukeSubmitDeadline( @@ -150,7 +147,7 @@ def get_plugin_info( WriteNode=write_node_name ) - plugin_payload: dict = attr.asdict(plugin_info) + plugin_payload: dict = asdict(plugin_info) return plugin_payload def _get_limit_groups(self, limit_groups): diff --git a/client/ayon_deadline/plugins/publish/unreal/submit_unreal_deadline.py b/client/ayon_deadline/plugins/publish/unreal/submit_unreal_deadline.py index 79a6eb1dac..76ecbd67c8 100644 --- a/client/ayon_deadline/plugins/publish/unreal/submit_unreal_deadline.py +++ b/client/ayon_deadline/plugins/publish/unreal/submit_unreal_deadline.py @@ -1,5 +1,5 @@ import os -import attr +from dataclasses import dataclass, field, asdict import pyblish.api from datetime import datetime from pathlib import Path @@ -9,20 +9,20 @@ from ayon_deadline import abstract_submit_deadline -@attr.s -class DeadlinePluginInfo(): - ProjectFile = attr.ib(default=None) - EditorExecutableName = attr.ib(default=None) - EngineVersion = attr.ib(default=None) - CommandLineMode = attr.ib(default=True) - OutputFilePath = attr.ib(default=None) - Output = attr.ib(default=None) - StartupDirectory = attr.ib(default=None) - CommandLineArguments = attr.ib(default=None) - MultiProcess = attr.ib(default=None) - PerforceStream = attr.ib(default=None) - PerforceChangelist = attr.ib(default=None) - PerforceGamePath = attr.ib(default=None) +@dataclass +class DeadlinePluginInfo: + ProjectFile: str = field(default=None) + EditorExecutableName: str = field(default=None) + EngineVersion: str = field(default=None) + CommandLineMode: str = field(default=True) + OutputFilePath: str = field(default=None) + Output: str = field(default=None) + StartupDirectory: str = field(default=None) + CommandLineArguments: str = field(default=None) + MultiProcess: bool = field(default=None) + PerforceStream: str = field(default=None) + PerforceChangelist: str = field(default=None) + PerforceGamePath: str = field(default=None) class UnrealSubmitDeadline( @@ -102,7 +102,7 @@ def get_plugin_info(self): deadline_plugin_info, ) - return attr.asdict(deadline_plugin_info) + return asdict(deadline_plugin_info) def from_published_scene(self): """ Do not overwrite expected files. 
From 051d8e2b2e257bb0bc3a0f87ae76aee19c80898a Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 7 Nov 2024 12:00:12 +0100 Subject: [PATCH 086/153] Refactored names of getters Added typing --- client/ayon_deadline/addon.py | 76 ++++++++++++------- .../plugins/publish/global/collect_jobinfo.py | 8 +- .../publish/global/validate_deadline_pools.py | 8 +- 3 files changed, 56 insertions(+), 36 deletions(-) diff --git a/client/ayon_deadline/addon.py b/client/ayon_deadline/addon.py index e5ee0d483c..c1ed24696f 100644 --- a/client/ayon_deadline/addon.py +++ b/client/ayon_deadline/addon.py @@ -3,6 +3,7 @@ import requests import six +from typing import Optional, Iterable, Pattern, Union, List, Tuple from ayon_core.lib import Logger from ayon_core.addon import AYONAddon, IPluginPaths @@ -38,10 +39,10 @@ def initialize(self, studio_settings): self.deadline_servers_info = deadline_servers_info - self._pools_per_server = {} - self._limit_groups_per_server = {} - self._groups_per_server = {} - self._machines_per_server = {} + self._pools_by_server_name = {} + self._limit_groups_by_server_name = {} + self._groups_by_server_name = {} + self.__machines_by_server_name = {} def get_plugin_paths(self): """Deadline plugin paths.""" @@ -51,7 +52,10 @@ def get_plugin_paths(self): # abstract on the parent class return {} - def get_publish_plugin_paths(self, host_name=None): + def get_publish_plugin_paths( + self, + host_name: Optional[str] = None + ) -> List[str]: publish_dir = os.path.join(DEADLINE_ADDON_ROOT, "plugins", "publish") paths = [os.path.join(publish_dir, "global")] if host_name: @@ -59,8 +63,12 @@ def get_publish_plugin_paths(self, host_name=None): return paths @staticmethod - def get_deadline_pools(webservice, auth=None, log=None): - """Get pools from Deadline. + def get_deadline_pools( + webservice: str, + auth: Optional[Tuple[str, str]] = None, + log: Optional[Logger] = None + ) -> List[str]: + """Get pools from Deadline API. Args: webservice (str): Server url. auth (Optional[Tuple[str, str]]): Tuple containing username, @@ -77,11 +85,15 @@ def get_deadline_pools(webservice, auth=None, log=None): endpoint, auth, log, item_type="pools") @staticmethod - def get_deadline_groups(webservice, auth=None, log=None): - """Get Groups from Deadline. + def get_deadline_groups( + webservice: str, + auth: Optional[Tuple[str, str]] = None, + log: Optional[Logger] = None + ) -> List[str]: + """Get Groups from Deadline API. Args: webservice (str): Server url. - auth (Optional[Tuple[str, str]]): Tuple containing username, + auth (Optional[Tuple[str, str]]): Tuple containing username, password log (Optional[Logger]): Logger to log errors to, if provided. Returns: @@ -95,11 +107,15 @@ def get_deadline_groups(webservice, auth=None, log=None): endpoint, auth, log, item_type="groups") @staticmethod - def get_deadline_limit_groups(webservice, auth=None, log=None): - """Get Limit Groups from Deadline. + def get_deadline_limit_groups( + webservice: str, + auth: Optional[Tuple[str, str]] = None, + log: Optional[Logger] = None + ) -> List[str]: + """Get Limit Groups from Deadline API. Args: webservice (str): Server url. - auth (Optional[Tuple[str, str]]): Tuple containing username, + auth (Optional[Tuple[str, str]]): Tuple containing username, password log (Optional[Logger]): Logger to log errors to, if provided. 
Returns: @@ -113,11 +129,15 @@ def get_deadline_limit_groups(webservice, auth=None, log=None): endpoint, auth, log, item_type="limitgroups") @staticmethod - def get_deadline_workers(webservice, auth=None, log=None): - """Get Groups from Deadline. + def get_deadline_workers( + webservice: str, + auth: Optional[Tuple[str, str]] = None, + log: Optional[Logger] = None + ) -> List[str]: + """Get Workers (eg.machine names) from Deadline API. Args: webservice (str): Server url. - auth (Optional[Tuple[str, str]]): Tuple containing username, + auth (Optional[Tuple[str, str]]): Tuple containing username, password log (Optional[Logger]): Logger to log errors to, if provided. Returns: @@ -155,8 +175,8 @@ def _get_deadline_info(endpoint, auth=None, log=None, item_type=None): return response.json() - def pools_per_server(self, server_name): - pools = self._pools_per_server.get(server_name) + def pools_by_server_name(self, server_name: str) -> List[str]: + pools = self._pools_by_server_name.get(server_name) if pools is None: dl_server_info = self.deadline_servers_info.get(server_name) @@ -166,12 +186,12 @@ def pools_per_server(self, server_name): dl_server_info["value"], auth ) - self._pools_per_server[server_name] = pools + self._pools_by_server_name[server_name] = pools return pools - def groups_per_server(self, server_name): - groups = self._groups_per_server.get(server_name) + def groups_by_server_name(self, server_name: str) -> List[str]: + groups = self._groups_by_server_name.get(server_name) if groups is None: dl_server_info = self.deadline_servers_info.get(server_name) @@ -181,12 +201,12 @@ def groups_per_server(self, server_name): dl_server_info["value"], auth ) - self._groups_per_server[server_name] = groups + self._groups_by_server_name[server_name] = groups return groups - def limit_groups_per_server(self, server_name): - limit_groups = self._limit_groups_per_server.get(server_name) + def limit_groups_by_server_name(self, server_name: str) -> List[str]: + limit_groups = self._limit_groups_by_server_name.get(server_name) if limit_groups is None: dl_server_info = self.deadline_servers_info.get(server_name) @@ -196,12 +216,12 @@ def limit_groups_per_server(self, server_name): dl_server_info["value"], auth ) - self._limit_groups_per_server[server_name] = limit_groups + self._limit_groups_by_server_name[server_name] = limit_groups return limit_groups - def machines_per_server(self, server_name): - machines = self._machines_per_server.get(server_name) + def machines_by_server_nameserver(self, server_name: str) -> List[str]: + machines = self.__machines_by_server_name.get(server_name) if machines is None: dl_server_info = self.deadline_servers_info.get(server_name) @@ -211,7 +231,7 @@ def machines_per_server(self, server_name): dl_server_info["value"], auth ) - self._machines_per_server[server_name] = machines + self.__machines_by_server_name[server_name] = machines return machines diff --git a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py index 3aa5549941..64ea927460 100644 --- a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py +++ b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py @@ -90,16 +90,16 @@ def apply_settings(cls, project_settings): addons_manager = AddonsManager() deadline_addon = addons_manager["deadline"] deadline_server_name = settings["deadline_server"] - pools = deadline_addon.pools_per_server(deadline_server_name) + pools = 
deadline_addon.pools_by_server_name(deadline_server_name) for pool in pools: cls.pool_enum_values.append({"value": pool, "label": pool}) - groups = deadline_addon.groups_per_server(deadline_server_name) + groups = deadline_addon.groups_by_server_name(deadline_server_name) for group in groups: cls.group_enum_values.append({"value": group, "label": group}) limit_groups = ( - deadline_addon.limit_groups_per_server(deadline_server_name)) + deadline_addon.limit_groups_by_server_name(deadline_server_name)) if not limit_groups: limit_groups.append("none") # enum cannot be empty for limit_group in limit_groups: @@ -107,7 +107,7 @@ def apply_settings(cls, project_settings): {"value": limit_group, "label": limit_group}) machines = ( - deadline_addon.machines_per_server(deadline_server_name)) + deadline_addon.machines_by_server_nameserver(deadline_server_name)) for machine in machines: cls.machines_enum_values.append( {"value": machine, "label": machine}) diff --git a/client/ayon_deadline/plugins/publish/global/validate_deadline_pools.py b/client/ayon_deadline/plugins/publish/global/validate_deadline_pools.py index 196fa4b183..1cefe37431 100644 --- a/client/ayon_deadline/plugins/publish/global/validate_deadline_pools.py +++ b/client/ayon_deadline/plugins/publish/global/validate_deadline_pools.py @@ -22,7 +22,7 @@ class ValidateDeadlinePools(OptionalPyblishPluginMixin, targets = ["local"] # cache - pools_per_url = {} + pools_by_url = {} def process(self, instance): if not self.is_active(instance.data): @@ -64,7 +64,7 @@ def process(self, instance): ) def get_pools(self, deadline_addon, deadline_url, auth): - if deadline_url not in self.pools_per_url: + if deadline_url not in self.pools_by_url: self.log.debug( "Querying available pools for Deadline url: {}".format( deadline_url) @@ -76,6 +76,6 @@ def get_pools(self, deadline_addon, deadline_url, auth): if "none" not in pools: pools.append("none") self.log.info("Available pools: {}".format(pools)) - self.pools_per_url[deadline_url] = pools + self.pools_by_url[deadline_url] = pools - return self.pools_per_url[deadline_url] + return self.pools_by_url[deadline_url] From 1461ffd5c8824afe6de4322ee3110f9a6003da2e Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 7 Nov 2024 12:04:06 +0100 Subject: [PATCH 087/153] Removed unneeded comment --- .../ayon_deadline/plugins/publish/global/submit_publish_job.py | 1 - 1 file changed, 1 deletion(-) diff --git a/client/ayon_deadline/plugins/publish/global/submit_publish_job.py b/client/ayon_deadline/plugins/publish/global/submit_publish_job.py index 4eb96ed4b7..59dc38e755 100644 --- a/client/ayon_deadline/plugins/publish/global/submit_publish_job.py +++ b/client/ayon_deadline/plugins/publish/global/submit_publish_job.py @@ -481,7 +481,6 @@ def process(self, instance): "intent": instance.context.data.get("intent"), "comment": instance.context.data.get("comment"), "job": render_job or None, - # do not carry over unnecessary DL info with large DeadlineJobInfo "instances": instances } From 2f58203b426b30f2bbced124967c3007622dcd11 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 7 Nov 2024 12:06:05 +0100 Subject: [PATCH 088/153] Refactor names to more precise --- client/ayon_deadline/addon.py | 32 ++++++++++++++++---------------- 1 file changed, 16 insertions(+), 16 deletions(-) diff --git a/client/ayon_deadline/addon.py b/client/ayon_deadline/addon.py index c1ed24696f..6203f863af 100644 --- a/client/ayon_deadline/addon.py +++ b/client/ayon_deadline/addon.py @@ -64,89 +64,89 @@ def get_publish_plugin_paths( 
@staticmethod def get_deadline_pools( - webservice: str, + webservice_url: str, auth: Optional[Tuple[str, str]] = None, log: Optional[Logger] = None ) -> List[str]: """Get pools from Deadline API. Args: - webservice (str): Server url. + webservice_url (str): Server url. auth (Optional[Tuple[str, str]]): Tuple containing username, password log (Optional[Logger]): Logger to log errors to, if provided. Returns: List[str]: Pools. Throws: - RuntimeError: If deadline webservice is unreachable. + RuntimeError: If deadline webservice_url is unreachable. """ - endpoint = "{}/api/pools?NamesOnly=true".format(webservice) + endpoint = "{}/api/pools?NamesOnly=true".format(webservice_url) return DeadlineAddon._get_deadline_info( endpoint, auth, log, item_type="pools") @staticmethod def get_deadline_groups( - webservice: str, + webservice_url: str, auth: Optional[Tuple[str, str]] = None, log: Optional[Logger] = None ) -> List[str]: """Get Groups from Deadline API. Args: - webservice (str): Server url. + webservice_url (str): Server url. auth (Optional[Tuple[str, str]]): Tuple containing username, password log (Optional[Logger]): Logger to log errors to, if provided. Returns: List[str]: Limit Groups. Throws: - RuntimeError: If deadline webservice is unreachable. + RuntimeError: If deadline webservice_url is unreachable. """ - endpoint = "{}/api/groups".format(webservice) + endpoint = "{}/api/groups".format(webservice_url) return DeadlineAddon._get_deadline_info( endpoint, auth, log, item_type="groups") @staticmethod def get_deadline_limit_groups( - webservice: str, + webservice_url: str, auth: Optional[Tuple[str, str]] = None, log: Optional[Logger] = None ) -> List[str]: """Get Limit Groups from Deadline API. Args: - webservice (str): Server url. + webservice_url (str): Server url. auth (Optional[Tuple[str, str]]): Tuple containing username, password log (Optional[Logger]): Logger to log errors to, if provided. Returns: List[str]: Limit Groups. Throws: - RuntimeError: If deadline webservice is unreachable. + RuntimeError: If deadline webservice_url is unreachable. """ - endpoint = "{}/api/limitgroups?NamesOnly=true".format(webservice) + endpoint = "{}/api/limitgroups?NamesOnly=true".format(webservice_url) return DeadlineAddon._get_deadline_info( endpoint, auth, log, item_type="limitgroups") @staticmethod def get_deadline_workers( - webservice: str, + webservice_url: str, auth: Optional[Tuple[str, str]] = None, log: Optional[Logger] = None ) -> List[str]: """Get Workers (eg.machine names) from Deadline API. Args: - webservice (str): Server url. + webservice_url (str): Server url. auth (Optional[Tuple[str, str]]): Tuple containing username, password log (Optional[Logger]): Logger to log errors to, if provided. Returns: List[str]: Limit Groups. Throws: - RuntimeError: If deadline webservice is unreachable. + RuntimeError: If deadline webservice_url is unreachable. 
""" - endpoint = "{}/api/slaves?NamesOnly=true".format(webservice) + endpoint = "{}/api/slaves?NamesOnly=true".format(webservice_url) return DeadlineAddon._get_deadline_info( endpoint, auth, log, item_type="workers") From 7cdc12e06d609027418855e2202529853be17f70 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 7 Nov 2024 12:09:00 +0100 Subject: [PATCH 089/153] Reformatted docstrings --- client/ayon_deadline/addon.py | 28 ++++++++++++++++++++-------- 1 file changed, 20 insertions(+), 8 deletions(-) diff --git a/client/ayon_deadline/addon.py b/client/ayon_deadline/addon.py index 6203f863af..129f4248ec 100644 --- a/client/ayon_deadline/addon.py +++ b/client/ayon_deadline/addon.py @@ -3,7 +3,7 @@ import requests import six -from typing import Optional, Iterable, Pattern, Union, List, Tuple +from typing import Optional, List, Tuple from ayon_core.lib import Logger from ayon_core.addon import AYONAddon, IPluginPaths @@ -69,15 +69,18 @@ def get_deadline_pools( log: Optional[Logger] = None ) -> List[str]: """Get pools from Deadline API. + Args: webservice_url (str): Server url. - auth (Optional[Tuple[str, str]]): Tuple containing username, + auth (Optional[Tuple[str, str]]): Tuple containing username, password log (Optional[Logger]): Logger to log errors to, if provided. + Returns: - List[str]: Pools. - Throws: - RuntimeError: If deadline webservice_url is unreachable. + List[str]: Limit Groups. + + Raises: + RuntimeError: If deadline webservice is unreachable. """ endpoint = "{}/api/pools?NamesOnly=true".format(webservice_url) @@ -91,14 +94,17 @@ def get_deadline_groups( log: Optional[Logger] = None ) -> List[str]: """Get Groups from Deadline API. + Args: webservice_url (str): Server url. auth (Optional[Tuple[str, str]]): Tuple containing username, password log (Optional[Logger]): Logger to log errors to, if provided. + Returns: List[str]: Limit Groups. - Throws: + + Raises: RuntimeError: If deadline webservice_url is unreachable. """ @@ -113,14 +119,17 @@ def get_deadline_limit_groups( log: Optional[Logger] = None ) -> List[str]: """Get Limit Groups from Deadline API. + Args: webservice_url (str): Server url. auth (Optional[Tuple[str, str]]): Tuple containing username, password log (Optional[Logger]): Logger to log errors to, if provided. + Returns: List[str]: Limit Groups. - Throws: + + Raises: RuntimeError: If deadline webservice_url is unreachable. """ @@ -135,14 +144,17 @@ def get_deadline_workers( log: Optional[Logger] = None ) -> List[str]: """Get Workers (eg.machine names) from Deadline API. + Args: webservice_url (str): Server url. auth (Optional[Tuple[str, str]]): Tuple containing username, password log (Optional[Logger]): Logger to log errors to, if provided. + Returns: List[str]: Limit Groups. - Throws: + + Raises: RuntimeError: If deadline webservice_url is unreachable. 
""" From 1e398731908d38d30461f13ff2ff40f95d8a3dc3 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 7 Nov 2024 12:12:47 +0100 Subject: [PATCH 090/153] Added docstrings --- client/ayon_deadline/addon.py | 37 ++++++++++++++++++++++++++++++++++- 1 file changed, 36 insertions(+), 1 deletion(-) diff --git a/client/ayon_deadline/addon.py b/client/ayon_deadline/addon.py index 129f4248ec..614100b3fe 100644 --- a/client/ayon_deadline/addon.py +++ b/client/ayon_deadline/addon.py @@ -188,6 +188,15 @@ def _get_deadline_info(endpoint, auth=None, log=None, item_type=None): return response.json() def pools_by_server_name(self, server_name: str) -> List[str]: + """Returns dictionary of pools per DL server + + Args: + server_name (str): Deadline Server name from Project Settings. + + Returns: + Dict[str, List[str]]: {"default": ["pool1", "pool2"]} + + """ pools = self._pools_by_server_name.get(server_name) if pools is None: dl_server_info = self.deadline_servers_info.get(server_name) @@ -203,6 +212,15 @@ def pools_by_server_name(self, server_name: str) -> List[str]: return pools def groups_by_server_name(self, server_name: str) -> List[str]: + """Returns dictionary of groups per DL server + + Args: + server_name (str): Deadline Server name from Project Settings. + + Returns: + Dict[str, List[str]]: {"default": ["group1", "group2"]} + + """ groups = self._groups_by_server_name.get(server_name) if groups is None: dl_server_info = self.deadline_servers_info.get(server_name) @@ -218,6 +236,15 @@ def groups_by_server_name(self, server_name: str) -> List[str]: return groups def limit_groups_by_server_name(self, server_name: str) -> List[str]: + """Returns dictionary of limit groups per DL server + + Args: + server_name (str): Deadline Server name from Project Settings. + + Returns: + Dict[str, List[str]]: {"default": ["limit1", "limit2"]} + + """ limit_groups = self._limit_groups_by_server_name.get(server_name) if limit_groups is None: dl_server_info = self.deadline_servers_info.get(server_name) @@ -233,6 +260,15 @@ def limit_groups_by_server_name(self, server_name: str) -> List[str]: return limit_groups def machines_by_server_nameserver(self, server_name: str) -> List[str]: + """Returns dictionary of machines/workers per DL server + + Args: + server_name (str): Deadline Server name from Project Settings. 
+ + Returns: + Dict[str, List[str]]: {"default": ["renderNode1", "PC1"]} + + """ machines = self.__machines_by_server_name.get(server_name) if machines is None: dl_server_info = self.deadline_servers_info.get(server_name) @@ -246,4 +282,3 @@ def machines_by_server_nameserver(self, server_name: str) -> List[str]: self.__machines_by_server_name[server_name] = machines return machines - From 318fac03c5d0581815612f6b11b6369234464050 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 7 Nov 2024 12:21:04 +0100 Subject: [PATCH 091/153] Changed methods to classmethod --- client/ayon_deadline/addon.py | 22 ++++++++++++++++------ 1 file changed, 16 insertions(+), 6 deletions(-) diff --git a/client/ayon_deadline/addon.py b/client/ayon_deadline/addon.py index 614100b3fe..09bc9758a4 100644 --- a/client/ayon_deadline/addon.py +++ b/client/ayon_deadline/addon.py @@ -62,8 +62,9 @@ def get_publish_plugin_paths( paths.append(os.path.join(publish_dir, host_name)) return paths - @staticmethod + @classmethod def get_deadline_pools( + cls, webservice_url: str, auth: Optional[Tuple[str, str]] = None, log: Optional[Logger] = None @@ -87,8 +88,9 @@ def get_deadline_pools( return DeadlineAddon._get_deadline_info( endpoint, auth, log, item_type="pools") - @staticmethod + @classmethod def get_deadline_groups( + cls, webservice_url: str, auth: Optional[Tuple[str, str]] = None, log: Optional[Logger] = None @@ -112,8 +114,9 @@ def get_deadline_groups( return DeadlineAddon._get_deadline_info( endpoint, auth, log, item_type="groups") - @staticmethod + @classmethod def get_deadline_limit_groups( + cls, webservice_url: str, auth: Optional[Tuple[str, str]] = None, log: Optional[Logger] = None @@ -137,8 +140,9 @@ def get_deadline_limit_groups( return DeadlineAddon._get_deadline_info( endpoint, auth, log, item_type="limitgroups") - @staticmethod + @classmethod def get_deadline_workers( + cls, webservice_url: str, auth: Optional[Tuple[str, str]] = None, log: Optional[Logger] = None @@ -162,8 +166,14 @@ def get_deadline_workers( return DeadlineAddon._get_deadline_info( endpoint, auth, log, item_type="workers") - @staticmethod - def _get_deadline_info(endpoint, auth=None, log=None, item_type=None): + @classmethod + def _get_deadline_info( + cls, + endpoint, + auth=None, + log=None, + item_type=None + ): from .abstract_submit_deadline import requests_get if not log: From d6378a33e31b3b9bf0bc5207af011bd5f9af2f72 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 7 Nov 2024 12:21:29 +0100 Subject: [PATCH 092/153] Changed methods to staticmethod --- client/ayon_deadline/lib.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/client/ayon_deadline/lib.py b/client/ayon_deadline/lib.py index aa31467110..3b5c906d6a 100644 --- a/client/ayon_deadline/lib.py +++ b/client/ayon_deadline/lib.py @@ -402,8 +402,8 @@ def to_json(self) -> str: """Serialize the dataclass instance to a JSON string.""" return json.dumps(asdict(self)) - @classmethod - def _sanitize(cls, value) -> str: + @staticmethod + def _sanitize(value) -> str: if isinstance(value, str): if value == "none": return None From 3cbecec896b33132baf5cb378d53d541d85b0425 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 7 Nov 2024 12:22:24 +0100 Subject: [PATCH 093/153] Fix typo --- client/ayon_deadline/addon.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/client/ayon_deadline/addon.py b/client/ayon_deadline/addon.py index 09bc9758a4..767d872676 100644 --- a/client/ayon_deadline/addon.py +++ b/client/ayon_deadline/addon.py @@ -42,7 
+42,7 @@ def initialize(self, studio_settings): self._pools_by_server_name = {} self._limit_groups_by_server_name = {} self._groups_by_server_name = {} - self.__machines_by_server_name = {} + self._machines_by_server_name = {} def get_plugin_paths(self): """Deadline plugin paths.""" @@ -279,7 +279,7 @@ def machines_by_server_nameserver(self, server_name: str) -> List[str]: Dict[str, List[str]]: {"default": ["renderNode1", "PC1"]} """ - machines = self.__machines_by_server_name.get(server_name) + machines = self._machines_by_server_name.get(server_name) if machines is None: dl_server_info = self.deadline_servers_info.get(server_name) @@ -289,6 +289,6 @@ def machines_by_server_nameserver(self, server_name: str) -> List[str]: dl_server_info["value"], auth ) - self.__machines_by_server_name[server_name] = machines + self._machines_by_server_name[server_name] = machines return machines From 675a3358a1eb6e289fd219d4f62b93e7213ac74e Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 7 Nov 2024 12:25:04 +0100 Subject: [PATCH 094/153] Refactored getter names --- client/ayon_deadline/addon.py | 8 ++++---- .../plugins/publish/global/collect_jobinfo.py | 8 ++++---- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/client/ayon_deadline/addon.py b/client/ayon_deadline/addon.py index 767d872676..020754517b 100644 --- a/client/ayon_deadline/addon.py +++ b/client/ayon_deadline/addon.py @@ -197,7 +197,7 @@ def _get_deadline_info( return response.json() - def pools_by_server_name(self, server_name: str) -> List[str]: + def get_pools_by_server_name(self, server_name: str) -> List[str]: """Returns dictionary of pools per DL server Args: @@ -221,7 +221,7 @@ def pools_by_server_name(self, server_name: str) -> List[str]: return pools - def groups_by_server_name(self, server_name: str) -> List[str]: + def get_groups_by_server_name(self, server_name: str) -> List[str]: """Returns dictionary of groups per DL server Args: @@ -245,7 +245,7 @@ def groups_by_server_name(self, server_name: str) -> List[str]: return groups - def limit_groups_by_server_name(self, server_name: str) -> List[str]: + def get_limit_groups_by_server_name(self, server_name: str) -> List[str]: """Returns dictionary of limit groups per DL server Args: @@ -269,7 +269,7 @@ def limit_groups_by_server_name(self, server_name: str) -> List[str]: return limit_groups - def machines_by_server_nameserver(self, server_name: str) -> List[str]: + def get_machines_by_server_nameserver(self, server_name: str) -> List[str]: """Returns dictionary of machines/workers per DL server Args: diff --git a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py index 64ea927460..7951bafc1e 100644 --- a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py +++ b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py @@ -90,16 +90,16 @@ def apply_settings(cls, project_settings): addons_manager = AddonsManager() deadline_addon = addons_manager["deadline"] deadline_server_name = settings["deadline_server"] - pools = deadline_addon.pools_by_server_name(deadline_server_name) + pools = deadline_addon.get_pools_by_server_name(deadline_server_name) for pool in pools: cls.pool_enum_values.append({"value": pool, "label": pool}) - groups = deadline_addon.groups_by_server_name(deadline_server_name) + groups = deadline_addon.get_groups_by_server_name(deadline_server_name) for group in groups: cls.group_enum_values.append({"value": group, "label": group}) limit_groups = ( - 
deadline_addon.limit_groups_by_server_name(deadline_server_name)) + deadline_addon.get_limit_groups_by_server_name(deadline_server_name)) if not limit_groups: limit_groups.append("none") # enum cannot be empty for limit_group in limit_groups: @@ -107,7 +107,7 @@ def apply_settings(cls, project_settings): {"value": limit_group, "label": limit_group}) machines = ( - deadline_addon.machines_by_server_nameserver(deadline_server_name)) + deadline_addon.get_machines_by_server_nameserver(deadline_server_name)) for machine in machines: cls.machines_enum_values.append( {"value": machine, "label": machine}) From 0a4240050a551e9f4d0efe5b9a146d6da1756d89 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 7 Nov 2024 12:26:16 +0100 Subject: [PATCH 095/153] Refactored use of cls --- client/ayon_deadline/addon.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/client/ayon_deadline/addon.py b/client/ayon_deadline/addon.py index 020754517b..e3c7c34fd1 100644 --- a/client/ayon_deadline/addon.py +++ b/client/ayon_deadline/addon.py @@ -85,7 +85,7 @@ def get_deadline_pools( """ endpoint = "{}/api/pools?NamesOnly=true".format(webservice_url) - return DeadlineAddon._get_deadline_info( + return cls._get_deadline_info( endpoint, auth, log, item_type="pools") @classmethod @@ -111,7 +111,7 @@ def get_deadline_groups( """ endpoint = "{}/api/groups".format(webservice_url) - return DeadlineAddon._get_deadline_info( + return cls._get_deadline_info( endpoint, auth, log, item_type="groups") @classmethod @@ -137,7 +137,7 @@ def get_deadline_limit_groups( """ endpoint = "{}/api/limitgroups?NamesOnly=true".format(webservice_url) - return DeadlineAddon._get_deadline_info( + return cls._get_deadline_info( endpoint, auth, log, item_type="limitgroups") @classmethod @@ -163,7 +163,7 @@ def get_deadline_workers( """ endpoint = "{}/api/slaves?NamesOnly=true".format(webservice_url) - return DeadlineAddon._get_deadline_info( + return cls._get_deadline_info( endpoint, auth, log, item_type="workers") @classmethod From 403d577764e2aec5e29a0a7281a413aa188c2f80 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 7 Nov 2024 12:27:44 +0100 Subject: [PATCH 096/153] Refactored access to deadline info --- client/ayon_deadline/addon.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/client/ayon_deadline/addon.py b/client/ayon_deadline/addon.py index e3c7c34fd1..37aed8423f 100644 --- a/client/ayon_deadline/addon.py +++ b/client/ayon_deadline/addon.py @@ -209,7 +209,7 @@ def get_pools_by_server_name(self, server_name: str) -> List[str]: """ pools = self._pools_by_server_name.get(server_name) if pools is None: - dl_server_info = self.deadline_servers_info.get(server_name) + dl_server_info = self.deadline_servers_info[server_name] auth = (dl_server_info["default_username"], dl_server_info["default_password"]) @@ -233,7 +233,7 @@ def get_groups_by_server_name(self, server_name: str) -> List[str]: """ groups = self._groups_by_server_name.get(server_name) if groups is None: - dl_server_info = self.deadline_servers_info.get(server_name) + dl_server_info = self.deadline_servers_info[server_name] auth = (dl_server_info["default_username"], dl_server_info["default_password"]) @@ -257,7 +257,7 @@ def get_limit_groups_by_server_name(self, server_name: str) -> List[str]: """ limit_groups = self._limit_groups_by_server_name.get(server_name) if limit_groups is None: - dl_server_info = self.deadline_servers_info.get(server_name) + dl_server_info = self.deadline_servers_info[server_name] auth = 
(dl_server_info["default_username"], dl_server_info["default_password"]) @@ -281,7 +281,7 @@ def get_machines_by_server_nameserver(self, server_name: str) -> List[str]: """ machines = self._machines_by_server_name.get(server_name) if machines is None: - dl_server_info = self.deadline_servers_info.get(server_name) + dl_server_info = self.deadline_servers_info[server_name] auth = (dl_server_info["default_username"], dl_server_info["default_password"]) From ba7293daa06f4e342cb57dcf8d56e8115c847fd5 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 7 Nov 2024 13:07:39 +0100 Subject: [PATCH 097/153] Refactored utility methods for DL api to lib file --- client/ayon_deadline/addon.py | 161 ++---------------- client/ayon_deadline/lib.py | 144 +++++++++++++++- .../publish/global/validate_deadline_pools.py | 4 +- 3 files changed, 155 insertions(+), 154 deletions(-) diff --git a/client/ayon_deadline/addon.py b/client/ayon_deadline/addon.py index 37aed8423f..6f5429c8c2 100644 --- a/client/ayon_deadline/addon.py +++ b/client/ayon_deadline/addon.py @@ -1,25 +1,21 @@ import os -import sys -import requests -import six -from typing import Optional, List, Tuple +from typing import Optional, List -from ayon_core.lib import Logger from ayon_core.addon import AYONAddon, IPluginPaths +from .lib import ( + get_deadline_workers, + get_deadline_groups, + get_deadline_limit_groups, + get_deadline_pools +) from .version import __version__ DEADLINE_ADDON_ROOT = os.path.dirname(os.path.abspath(__file__)) -class DeadlineWebserviceError(Exception): - """ - Exception to throw when connection to Deadline server fails. - """ - - class DeadlineAddon(AYONAddon, IPluginPaths): name = "deadline" version = __version__ @@ -62,141 +58,6 @@ def get_publish_plugin_paths( paths.append(os.path.join(publish_dir, host_name)) return paths - @classmethod - def get_deadline_pools( - cls, - webservice_url: str, - auth: Optional[Tuple[str, str]] = None, - log: Optional[Logger] = None - ) -> List[str]: - """Get pools from Deadline API. - - Args: - webservice_url (str): Server url. - auth (Optional[Tuple[str, str]]): Tuple containing username, - password - log (Optional[Logger]): Logger to log errors to, if provided. - - Returns: - List[str]: Limit Groups. - - Raises: - RuntimeError: If deadline webservice is unreachable. - - """ - endpoint = "{}/api/pools?NamesOnly=true".format(webservice_url) - return cls._get_deadline_info( - endpoint, auth, log, item_type="pools") - - @classmethod - def get_deadline_groups( - cls, - webservice_url: str, - auth: Optional[Tuple[str, str]] = None, - log: Optional[Logger] = None - ) -> List[str]: - """Get Groups from Deadline API. - - Args: - webservice_url (str): Server url. - auth (Optional[Tuple[str, str]]): Tuple containing username, - password - log (Optional[Logger]): Logger to log errors to, if provided. - - Returns: - List[str]: Limit Groups. - - Raises: - RuntimeError: If deadline webservice_url is unreachable. - - """ - endpoint = "{}/api/groups".format(webservice_url) - return cls._get_deadline_info( - endpoint, auth, log, item_type="groups") - - @classmethod - def get_deadline_limit_groups( - cls, - webservice_url: str, - auth: Optional[Tuple[str, str]] = None, - log: Optional[Logger] = None - ) -> List[str]: - """Get Limit Groups from Deadline API. - - Args: - webservice_url (str): Server url. - auth (Optional[Tuple[str, str]]): Tuple containing username, - password - log (Optional[Logger]): Logger to log errors to, if provided. - - Returns: - List[str]: Limit Groups. 
- - Raises: - RuntimeError: If deadline webservice_url is unreachable. - - """ - endpoint = "{}/api/limitgroups?NamesOnly=true".format(webservice_url) - return cls._get_deadline_info( - endpoint, auth, log, item_type="limitgroups") - - @classmethod - def get_deadline_workers( - cls, - webservice_url: str, - auth: Optional[Tuple[str, str]] = None, - log: Optional[Logger] = None - ) -> List[str]: - """Get Workers (eg.machine names) from Deadline API. - - Args: - webservice_url (str): Server url. - auth (Optional[Tuple[str, str]]): Tuple containing username, - password - log (Optional[Logger]): Logger to log errors to, if provided. - - Returns: - List[str]: Limit Groups. - - Raises: - RuntimeError: If deadline webservice_url is unreachable. - - """ - endpoint = "{}/api/slaves?NamesOnly=true".format(webservice_url) - return cls._get_deadline_info( - endpoint, auth, log, item_type="workers") - - @classmethod - def _get_deadline_info( - cls, - endpoint, - auth=None, - log=None, - item_type=None - ): - from .abstract_submit_deadline import requests_get - - if not log: - log = Logger.get_logger(__name__) - - try: - kwargs = {} - if auth: - kwargs["auth"] = auth - response = requests_get(endpoint, **kwargs) - except requests.exceptions.ConnectionError as exc: - msg = 'Cannot connect to DL web service {}'.format(endpoint) - log.error(msg) - six.reraise( - DeadlineWebserviceError, - DeadlineWebserviceError('{} - {}'.format(msg, exc)), - sys.exc_info()[2]) - if not response.ok: - log.warning(f"No {item_type} retrieved") - return [] - - return response.json() - def get_pools_by_server_name(self, server_name: str) -> List[str]: """Returns dictionary of pools per DL server @@ -213,7 +74,7 @@ def get_pools_by_server_name(self, server_name: str) -> List[str]: auth = (dl_server_info["default_username"], dl_server_info["default_password"]) - pools = self.get_deadline_pools( + pools = get_deadline_pools( dl_server_info["value"], auth ) @@ -237,7 +98,7 @@ def get_groups_by_server_name(self, server_name: str) -> List[str]: auth = (dl_server_info["default_username"], dl_server_info["default_password"]) - groups = self.get_deadline_groups( + groups = get_deadline_groups( dl_server_info["value"], auth ) @@ -261,7 +122,7 @@ def get_limit_groups_by_server_name(self, server_name: str) -> List[str]: auth = (dl_server_info["default_username"], dl_server_info["default_password"]) - limit_groups = self.get_deadline_limit_groups( + limit_groups = get_deadline_limit_groups( dl_server_info["value"], auth ) @@ -285,7 +146,7 @@ def get_machines_by_server_nameserver(self, server_name: str) -> List[str]: auth = (dl_server_info["default_username"], dl_server_info["default_password"]) - machines = self.get_deadline_workers( + machines = get_deadline_workers( dl_server_info["value"], auth ) diff --git a/client/ayon_deadline/lib.py b/client/ayon_deadline/lib.py index 3b5c906d6a..61d5666d9f 100644 --- a/client/ayon_deadline/lib.py +++ b/client/ayon_deadline/lib.py @@ -1,9 +1,13 @@ import os +import sys +import requests from dataclasses import dataclass, field, asdict from functools import partial -from typing import Optional, Dict, Any +from typing import Optional, List, Tuple, Any, Dict import json +from ayon_core.lib import Logger + # describes list of product typed used for plugin filtering for farm publishing FARM_FAMILIES = [ "render", "render.farm", "render.frames_farm", @@ -49,6 +53,142 @@ def get_instance_job_envs(instance) -> "dict[str, str]": return env +def get_deadline_pools( + webservice_url: str, + auth: 
Optional[Tuple[str, str]] = None, + log: Optional[Logger] = None +) -> List[str]: + """Get pools from Deadline API. + + Args: + webservice_url (str): Server url. + auth (Optional[Tuple[str, str]]): Tuple containing username, + password + log (Optional[Logger]): Logger to log errors to, if provided. + + Returns: + List[str]: Pools. + + Raises: + RuntimeError: If deadline webservice is unreachable. + + """ + endpoint = "{}/api/pools?NamesOnly=true".format(webservice_url) + return _get_deadline_info( + endpoint, auth, log, item_type="pools") + + +def get_deadline_groups( + webservice_url: str, + auth: Optional[Tuple[str, str]] = None, + log: Optional[Logger] = None +) -> List[str]: + """Get Groups from Deadline API. + + Args: + webservice_url (str): Server url. + auth (Optional[Tuple[str, str]]): Tuple containing username, + password + log (Optional[Logger]): Logger to log errors to, if provided. + + Returns: + List[str]: Groups. + + Raises: + RuntimeError: If deadline webservice_url is unreachable. + + """ + endpoint = "{}/api/groups".format(webservice_url) + return _get_deadline_info( + endpoint, auth, log, item_type="groups") + + +def get_deadline_limit_groups( + webservice_url: str, + auth: Optional[Tuple[str, str]] = None, + log: Optional[Logger] = None +) -> List[str]: + """Get Limit Groups from Deadline API. + + Args: + webservice_url (str): Server url. + auth (Optional[Tuple[str, str]]): Tuple containing username, + password + log (Optional[Logger]): Logger to log errors to, if provided. + + Returns: + List[str]: Limit Groups. + + Raises: + RuntimeError: If deadline webservice_url is unreachable. + + """ + endpoint = "{}/api/limitgroups?NamesOnly=true".format(webservice_url) + return _get_deadline_info( + endpoint, auth, log, item_type="limitgroups") + + +def get_deadline_workers( + webservice_url: str, + auth: Optional[Tuple[str, str]] = None, + log: Optional[Logger] = None +) -> List[str]: + """Get Workers (e.g. machine names) from Deadline API. + + Args: + webservice_url (str): Server url. + auth (Optional[Tuple[str, str]]): Tuple containing username, + password + log (Optional[Logger]): Logger to log errors to, if provided. + + Returns: + List[str]: Workers. + + Raises: + RuntimeError: If deadline webservice_url is unreachable. + + """ + endpoint = "{}/api/slaves?NamesOnly=true".format(webservice_url) + return _get_deadline_info( + endpoint, auth, log, item_type="workers") + + +def _get_deadline_info( + endpoint, + auth=None, + log=None, + item_type=None +): + from .abstract_submit_deadline import requests_get + + if not log: + log = Logger.get_logger(__name__) + + try: + kwargs = {} + if auth: + kwargs["auth"] = auth + response = requests_get(endpoint, **kwargs) + except requests.exceptions.ConnectionError as exc: + msg = 'Cannot connect to DL web service {}'.format(endpoint) + log.error(msg) + raise DeadlineWebserviceError( + '{} - {}'.format(msg, exc) + ) from exc + if not response.ok: + log.warning(f"No {item_type} retrieved") + return [] + + return response.json() + + +class DeadlineWebserviceError(Exception): + """ + Exception to throw when connection to Deadline server fails.
+ """ + + class DeadlineKeyValueVar(dict): """ @@ -365,7 +505,7 @@ def filter_data(a, v): return serialized @classmethod - def from_dict(cls, data: Dict) -> 'AYONDeadlineJobInfo': + def from_dict(cls, data: Dict[str, Any]) -> 'AYONDeadlineJobInfo': implemented_field_values = { "ChunkSize": data["chunk_size"], diff --git a/client/ayon_deadline/plugins/publish/global/validate_deadline_pools.py b/client/ayon_deadline/plugins/publish/global/validate_deadline_pools.py index 1cefe37431..cd37ea0f34 100644 --- a/client/ayon_deadline/plugins/publish/global/validate_deadline_pools.py +++ b/client/ayon_deadline/plugins/publish/global/validate_deadline_pools.py @@ -4,7 +4,7 @@ PublishXmlValidationError, OptionalPyblishPluginMixin ) -from ayon_deadline.lib import FARM_FAMILIES +from ayon_deadline.lib import FARM_FAMILIES, get_deadline_pools class ValidateDeadlinePools(OptionalPyblishPluginMixin, @@ -69,7 +69,7 @@ def get_pools(self, deadline_addon, deadline_url, auth): "Querying available pools for Deadline url: {}".format( deadline_url) ) - pools = deadline_addon.get_deadline_pools( + pools = get_deadline_pools( deadline_url, auth=auth, log=self.log ) # some DL return "none" as a pool name From 86ddfb8ac0d6cd9253908892f8c6ad1857224787 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 7 Nov 2024 13:08:54 +0100 Subject: [PATCH 098/153] Fix typo --- client/ayon_deadline/addon.py | 2 +- client/ayon_deadline/plugins/publish/global/collect_jobinfo.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/client/ayon_deadline/addon.py b/client/ayon_deadline/addon.py index 6f5429c8c2..a9d91c2587 100644 --- a/client/ayon_deadline/addon.py +++ b/client/ayon_deadline/addon.py @@ -130,7 +130,7 @@ def get_limit_groups_by_server_name(self, server_name: str) -> List[str]: return limit_groups - def get_machines_by_server_nameserver(self, server_name: str) -> List[str]: + def get_machines_by_server_name(self, server_name: str) -> List[str]: """Returns dictionary of machines/workers per DL server Args: diff --git a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py index 7951bafc1e..94948979b7 100644 --- a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py +++ b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py @@ -107,7 +107,7 @@ def apply_settings(cls, project_settings): {"value": limit_group, "label": limit_group}) machines = ( - deadline_addon.get_machines_by_server_nameserver(deadline_server_name)) + deadline_addon.get_machines_by_server_name(deadline_server_name)) for machine in machines: cls.machines_enum_values.append( {"value": machine, "label": machine}) From 6686f410ee32d0ddb2bd102c42b364202ad18545 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 7 Nov 2024 13:40:10 +0100 Subject: [PATCH 099/153] Removed obsolete fields for CelAction --- server/settings/publish_plugins.py | 22 ++++------------------ 1 file changed, 4 insertions(+), 18 deletions(-) diff --git a/server/settings/publish_plugins.py b/server/settings/publish_plugins.py index 4d2578f8de..60844683b6 100644 --- a/server/settings/publish_plugins.py +++ b/server/settings/publish_plugins.py @@ -315,17 +315,8 @@ class AfterEffectsSubmitDeadlineModel(BaseSettingsModel): class CelactionSubmitDeadlineModel(BaseSettingsModel): enabled: bool = SettingsField(True, title="Enabled") - deadline_department: str = SettingsField("", title="Deadline apartment") - deadline_priority: int = SettingsField(50, title="Deadline priority") - 
deadline_pool: str = SettingsField("", title="Deadline pool") - deadline_pool_secondary: str = SettingsField( - "", title="Deadline pool (secondary)" - ) - deadline_group: str = SettingsField("", title="Deadline Group") - deadline_chunk_size: int = SettingsField(10, title="Deadline Chunk size") - deadline_job_delay: str = SettingsField( - "", title="Delay job (timecode dd:hh:mm:ss)" - ) + optional: bool = SettingsField(title="Optional") + active: bool = SettingsField(title="Active") class BlenderSubmitDeadlineModel(BaseSettingsModel): @@ -474,13 +465,8 @@ class PublishPluginsModel(BaseSettingsModel): }, "CelactionSubmitDeadline": { "enabled": True, - "deadline_department": "", - "deadline_priority": 50, - "deadline_pool": "", - "deadline_pool_secondary": "", - "deadline_group": "", - "deadline_chunk_size": 10, - "deadline_job_delay": "00:00:00:00" + "optional": False, + "active": True, }, "FusionSubmitDeadline": { "enabled": True, From 118ace2e3be84defced4e6395f1dddfc8e51b86d Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 7 Nov 2024 14:41:26 +0100 Subject: [PATCH 100/153] Removed env_search_replace_values and env_allowed_keys These settings seem obsolete; it is not really useful to push artist values of environment variables through. Possible filtering of env vars should be handled in `extractenvironments`. --- server/settings/publish_plugins.py | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/server/settings/publish_plugins.py b/server/settings/publish_plugins.py index 60844683b6..f3197c8744 100644 --- a/server/settings/publish_plugins.py +++ b/server/settings/publish_plugins.py @@ -109,16 +109,6 @@ class CollectJobInfoItem(BaseSettingsModel): True, title="Workfile Dependency") multiprocess: bool = SettingsField(False, title="Multiprocess") - env_allowed_keys: list[str] = SettingsField( - default_factory=list, - title="Allowed environment keys", - description="Pass selected environment variables with current value" - ) - env_search_replace_values: list[EnvSearchReplaceSubmodel] = SettingsField( - default_factory=list, - title="Search & replace in environment values", - description="Replace string values in 'Name' with value from 'Value'" - ) additional_job_info: str = SettingsField( "", title="Additional JobInfo data", From b8f33730daf411f87bc1fc30d2b3ad7b14b131a9 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 7 Nov 2024 14:56:39 +0100 Subject: [PATCH 101/153] Removed hardcoded chunk size, uses value from Profile --- .../ayon_deadline/plugins/publish/nuke/submit_nuke_deadline.py | 1 - 1 file changed, 1 deletion(-) diff --git a/client/ayon_deadline/plugins/publish/nuke/submit_nuke_deadline.py b/client/ayon_deadline/plugins/publish/nuke/submit_nuke_deadline.py index c7b8b303e4..7fd654bfeb 100644 --- a/client/ayon_deadline/plugins/publish/nuke/submit_nuke_deadline.py +++ b/client/ayon_deadline/plugins/publish/nuke/submit_nuke_deadline.py @@ -88,7 +88,6 @@ def process(self, instance): if instance.data.get("bakingNukeScripts"): for baking_script in instance.data["bakingNukeScripts"]: self.job_info.JobType = "Normal" - self.job_info.ChunkSize = 99999999 response_data = instance.data["deadlineSubmissionJob"] if response_data.get("_id"): From 3df6c16ba85c34e71ee062f487f72da61bd60990 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 7 Nov 2024 15:30:17 +0100 Subject: [PATCH 102/153] Docstrings --- .../plugins/publish/global/collect_jobinfo.py | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py
b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py index 94948979b7..160f9f9753 100644 --- a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py +++ b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py @@ -114,6 +114,15 @@ def apply_settings(cls, project_settings): @classmethod def get_attr_defs_for_instance(cls, create_context, instance): + """Get list of attr defs that are set in Settings as artist overridable + + Args: + create_context (ayon_core.pipeline.create.CreateContext) + instance (ayon_core.pipeline.create.CreatedInstance): + + Returns: + (list) + """ if instance.product_type not in cls.families: return [] @@ -148,7 +157,7 @@ def get_attr_defs_for_instance(cls, create_context, instance): defs.extend(cls._get_artist_overrides(overrides, profile)) - # explicit + # explicit frames to render - for test renders defs.append( TextDef( "frames", @@ -168,7 +177,7 @@ def get_attr_defs_for_instance(cls, create_context, instance): @classmethod def _get_artist_overrides(cls, overrides, profile): - """Provide list of Defs that could be filled by artist""" + """Provide list of all possible Defs that could be filled by artist""" # should be matching to extract_jobinfo_overrides_enum default_values = {} for key in overrides: From 3ebe477459469ca80e0faefc03e4a654e8bb43d7 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 7 Nov 2024 15:30:29 +0100 Subject: [PATCH 103/153] Added validator for priority --- .../global/validate_deadline_jobinfo.py | 38 +++++++++++++++++++ 1 file changed, 38 insertions(+) create mode 100644 client/ayon_deadline/plugins/publish/global/validate_deadline_jobinfo.py diff --git a/client/ayon_deadline/plugins/publish/global/validate_deadline_jobinfo.py b/client/ayon_deadline/plugins/publish/global/validate_deadline_jobinfo.py new file mode 100644 index 0000000000..d7db4f69dc --- /dev/null +++ b/client/ayon_deadline/plugins/publish/global/validate_deadline_jobinfo.py @@ -0,0 +1,38 @@ +import pyblish.api + +from ayon_core.pipeline import ( + PublishValidationError, + OptionalPyblishPluginMixin +) +from ayon_deadline.lib import FARM_FAMILIES + + +class ValidateDeadlineJobInfo( + OptionalPyblishPluginMixin, + pyblish.api.InstancePlugin +): + """Validate collected values for JobInfo section in Deadline submission + + """ + + label = "Validate Deadline JobInfo" + order = pyblish.api.ValidatorOrder + families = FARM_FAMILIES + optional = True + targets = ["local"] + + def process(self, instance): + if not self.is_active(instance.data): + return + + if not instance.data.get("farm"): + self.log.debug("Skipping local instance.") + return + + priority = instance.data["deadline"]["job_info"].Priority + if priority < 0 or priority > 100: + raise PublishValidationError( + f"Priority:'{priority}' must be between 0-100") From 37dcd72aa48526ccebab782a29f796672418b285 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 8 Nov 2024 11:34:16 +0100 Subject: [PATCH 104/153] Removed use_gpu from JobInfo Used only for Nuke PluginInfo --- .../publish/nuke/submit_nuke_deadline.py | 22 ++++++++++++++++--- server/settings/publish_plugins.py | 3 ++- 2 files changed, 21 insertions(+), 4 deletions(-) diff --git a/client/ayon_deadline/plugins/publish/nuke/submit_nuke_deadline.py b/client/ayon_deadline/plugins/publish/nuke/submit_nuke_deadline.py index 7fd654bfeb..72232d5885 100644 --- a/client/ayon_deadline/plugins/publish/nuke/submit_nuke_deadline.py +++
b/client/ayon_deadline/plugins/publish/nuke/submit_nuke_deadline.py @@ -4,6 +4,7 @@ import pyblish.api +from ayon_core.lib import BoolDef from ayon_core.pipeline.publish import ( AYONPyblishPluginMixin ) @@ -37,6 +38,10 @@ class NukeSubmitDeadline( families = ["render", "prerender"] optional = True targets = ["local"] + settings_category = "deadline" + + use_gpu = None + node_class_limit_groups = {} def process(self, instance): """Plugin entry point.""" @@ -124,8 +129,7 @@ def get_job_info(self, job_info=None, **kwargs): start=start_frame, end=end_frame ) - - limit_groups = self._get_limit_groups(job_info.LimitGroups or []) + limit_groups = self._get_limit_groups(self.node_class_limit_groups) job_info.LimitGroups = limit_groups return job_info @@ -136,19 +140,31 @@ def get_plugin_info( context = instance.context version = re.search(r"\d+\.\d+", context.data.get("hostVersion")) + attribute_values = self.get_attr_values_from_data(instance.data) + render_dir = os.path.dirname(render_path) plugin_info = NukePluginInfo( SceneFile=scene_path, Version=version.group(), OutputFilePath=render_dir.replace("\\", "/"), ProjectPath=scene_path, - UseGpu=True, + UseGpu=attribute_values["use_gpu"], WriteNode=write_node_name ) plugin_payload: dict = asdict(plugin_info) return plugin_payload + @classmethod + def get_attribute_defs(cls): + return [ + BoolDef( + "use_gpu", + label="Use GPU", + default=cls.use_gpu, + ), + ] + def _get_limit_groups(self, limit_groups): """Search for limit group nodes and return group name. Limit groups will be defined as pairs in Nuke deadline submitter diff --git a/server/settings/publish_plugins.py b/server/settings/publish_plugins.py index f3197c8744..235fe27960 100644 --- a/server/settings/publish_plugins.py +++ b/server/settings/publish_plugins.py @@ -97,7 +97,6 @@ class CollectJobInfoItem(BaseSettingsModel): concurrent_tasks: int = SettingsField( 1, title="Number of concurrent tasks") department: str = SettingsField("", title="Department") - use_gpu: bool = SettingsField("", title="Use GPU") job_delay: str = SettingsField( "", title="Delay job", placeholder="dd:hh:mm:ss" @@ -248,9 +247,11 @@ class NukeSubmitDeadlineModel(BaseSettingsModel): optional: bool = SettingsField(title="Optional") active: bool = SettingsField(title="Active") + use_gpu: bool = SettingsField(True, title="Use GPU") node_class_limit_groups: list[LimitGroupsSubmodel] = SettingsField( default_factory=list, title="Node based Limit Groups", + description="Provide list of node types to get particular limit" ) From d2c085716d102dbbc758e0331f680a29a34c1024 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 8 Nov 2024 11:48:08 +0100 Subject: [PATCH 105/153] Fix pool --- .../plugins/publish/maya/submit_maya_deadline.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/client/ayon_deadline/plugins/publish/maya/submit_maya_deadline.py b/client/ayon_deadline/plugins/publish/maya/submit_maya_deadline.py index c23bb64686..165a3f74b8 100644 --- a/client/ayon_deadline/plugins/publish/maya/submit_maya_deadline.py +++ b/client/ayon_deadline/plugins/publish/maya/submit_maya_deadline.py @@ -340,10 +340,8 @@ def _tile_render(self, payload): self.tile_priority) assembly_job_info.TileJob = False - # TODO: This should be a new publisher attribute definition - pool = instance.context.data["project_settings"]["deadline"] - pool = pool["publish"]["ProcessSubmittedJobOnFarm"]["deadline_pool"] - assembly_job_info.Pool = pool or instance.data.get("primaryPool", "") + assembly_job_info.Pool = 
(instance.data.get("primaryPool") or + self.job_info.Pool) assembly_plugin_info = { "CleanupTiles": 1, From 8b7e43aaf3fa2ea34bece32b71da465778e4ef34 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 8 Nov 2024 11:48:08 +0100 Subject: [PATCH 106/153] Fix pool --- .../plugins/publish/maya/submit_maya_deadline.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/client/ayon_deadline/plugins/publish/maya/submit_maya_deadline.py b/client/ayon_deadline/plugins/publish/maya/submit_maya_deadline.py index c23bb64686..5ca38a560a 100644 --- a/client/ayon_deadline/plugins/publish/maya/submit_maya_deadline.py +++ b/client/ayon_deadline/plugins/publish/maya/submit_maya_deadline.py @@ -100,6 +100,7 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline, settings_category = "deadline" tile_assembler_plugin = "DraftTileAssembler" + tile_priority = 50 def get_job_info(self, job_info=None): instance = self._instance @@ -340,10 +341,7 @@ def _tile_render(self, payload): self.tile_priority) assembly_job_info.TileJob = False - # TODO: This should be a new publisher attribute definition - pool = instance.context.data["project_settings"]["deadline"] - pool = pool["publish"]["ProcessSubmittedJobOnFarm"]["deadline_pool"] - assembly_job_info.Pool = pool or instance.data.get("primaryPool", "") + assembly_job_info.Pool = self.job_info.Pool assembly_plugin_info = { "CleanupTiles": 1, From 877542f8fe8df38c7ac37e8d743ae46e67678a6f Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 11 Nov 2024 11:12:13 +0100 Subject: [PATCH 107/153] Reordered imports --- client/ayon_deadline/lib.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/client/ayon_deadline/lib.py b/client/ayon_deadline/lib.py index 61d5666d9f..23b9676559 100644 --- a/client/ayon_deadline/lib.py +++ b/client/ayon_deadline/lib.py @@ -1,10 +1,11 @@ import os import sys -import requests +import json from dataclasses import dataclass, field, asdict from functools import partial from typing import Optional, List, Tuple, Any, Dict -import json + +import requests from ayon_core.lib import Logger From 286601fe23aa4006cf42edef5d3600033255bfb8 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 11 Nov 2024 11:23:54 +0100 Subject: [PATCH 108/153] Refactored logic Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> --- .../plugins/publish/global/collect_jobinfo.py | 12 +++++------- 1 file changed, 5 insertions(+), 7 deletions(-) diff --git a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py index 160f9f9753..91a24c2177 100644 --- a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py +++ b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py @@ -266,13 +266,11 @@ def _get_artist_overrides(cls, overrides, profile): ) ] - override_defs = [] - for attr_def in attr_defs: - if attr_def.key not in overrides: - continue - override_defs.append(attr_def) - - return override_defs + return [ + attr_def + for attr_def in attr_defs + if attr_def.key in overrides + ] @classmethod def register_create_context_callbacks(cls, create_context): From 6816382268972541b9e031b3f067b37cc5debc6e Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 11 Nov 2024 11:25:22 +0100 Subject: [PATCH 109/153] Pass in project_settings --- client/ayon_deadline/plugins/publish/global/collect_jobinfo.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py index 91a24c2177..585cd2a2ad 100644 --- a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py +++ b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py @@ -87,7 +87,7 @@ def apply_settings(cls, project_settings): cls.profiles = profiles or [] - addons_manager = AddonsManager() + addons_manager = AddonsManager(project_settings) deadline_addon = addons_manager["deadline"] deadline_server_name = settings["deadline_server"] pools = deadline_addon.get_pools_by_server_name(deadline_server_name) From 397cf34dea4a6d7bf808d1b9fac5eb4601956f01 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 11 Nov 2024 11:27:37 +0100 Subject: [PATCH 110/153] Use class variable --- .../plugins/publish/global/collect_jobinfo.py | 10 +--------- 1 file changed, 1 insertion(+), 9 deletions(-) diff --git a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py index 585cd2a2ad..60b88f4230 100644 --- a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py +++ b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py @@ -299,23 +299,15 @@ def _get_jobinfo_defaults(self, instance): """ context_data = instance.context.data host_name = context_data["hostName"] - project_settings = context_data["project_settings"] task_entity = context_data["taskEntity"] task_name = task_type = None if task_entity: task_name = task_entity["name"] task_type = task_entity["taskType"] - profiles = ( - project_settings - ["deadline"] - ["publish"] - ["CollectJobInfo"] - ["profiles"] - ) profile = filter_profiles( - profiles, + self.profiles, { "host_names": host_name, "task_types": task_type, From 781bab5d941e25970ba2472f4cc127c41dd44c6a Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 11 Nov 2024 12:45:03 +0100 Subject: [PATCH 111/153] Refactored list fields --- client/ayon_deadline/lib.py | 17 ++++++++++++----- .../plugins/publish/global/collect_jobinfo.py | 6 ++---- 2 files changed, 14 insertions(+), 9 deletions(-) diff --git a/client/ayon_deadline/lib.py b/client/ayon_deadline/lib.py index 23b9676559..3700b9bb0c 100644 --- a/client/ayon_deadline/lib.py +++ b/client/ayon_deadline/lib.py @@ -354,13 +354,13 @@ class DeadlineJobInfo: # Machine Limit MachineLimit: Optional[int] = field(default=None) # Default: 0 MachineLimitProgress: Optional[float] = field(default=None) # Default -1.0 - Whitelist: Optional[str] = field( - default=None) # Default blank (comma-separated list) - Blacklist: Optional[str] = field( - default=None) # Default blank (comma-separated list) + Whitelist: Optional[List[str]] = field( + default_factory=list) # Default blank (comma-separated list) + Blacklist: Optional[List[str]] = field( + default_factory=list) # Default blank (comma-separated list) # Limits - LimitGroups: Optional[str] = field(default=None) # Default: blank + LimitGroups: Optional[List[str]] = field(default_factory=list) # Default: blank # Dependencies JobDependencies: Optional[str] = field(default=None) # Default: blank @@ -503,6 +503,13 @@ def filter_data(a, v): ]: serialized.update(attribute.serialize()) + for attribute_key in [ + "LimitGroups", + "Whitelist", + "Blacklist", + ]: + serialized[attribute_key] = ",".join(serialized[attribute_key]) + return serialized @classmethod diff --git a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py 
b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py index 60b88f4230..eca52d0611 100644 --- a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py +++ b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py @@ -76,9 +76,9 @@ def _handle_machine_list(self, attr_values, job_info): machine_list = attr_values["machine_list"] if machine_list: if attr_values["machine_list_deny"]: - job_info.Blacklist = ",".join(machine_list) + job_info.Blacklist = machine_list else: - job_info.Whitelist = ",".join(machine_list) + job_info.Whitelist = machine_list @classmethod def apply_settings(cls, project_settings): @@ -194,8 +194,6 @@ def _get_artist_overrides(cls, overrides, profile): if value in cls.limit_group_enum_values: filtered.append(value) default_value = filtered - if isinstance(default_value, list): - default_value = ",".join(default_value) if key == "group" and default_value not in cls.group_enum_values: default_value = "" default_values[key] = default_value From 5596190dafd04ff7e5a95137b535b8b2d08a1234 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 11 Nov 2024 12:52:24 +0100 Subject: [PATCH 112/153] Changed recalculating only on context change --- .../ayon_deadline/plugins/publish/global/collect_jobinfo.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py index eca52d0611..f404f4db38 100644 --- a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py +++ b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py @@ -278,6 +278,10 @@ def register_create_context_callbacks(cls, create_context): def on_values_changed(cls, event): for instance_change in event["changes"]: instance = instance_change["instance"] + #recalculate only if context changes + if not instance_change.get("productName"): + continue + if not cls.instance_matches_plugin_families(instance): continue From 10682566f97ecd3d621f5793ed93ca72cd92a199 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 11 Nov 2024 15:42:14 +0100 Subject: [PATCH 113/153] Fix removing job_info Co-authored-by: Mustafa Jafar --- .../ayon_deadline/plugins/publish/global/submit_publish_job.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/client/ayon_deadline/plugins/publish/global/submit_publish_job.py b/client/ayon_deadline/plugins/publish/global/submit_publish_job.py index 59dc38e755..dd5c429b2d 100644 --- a/client/ayon_deadline/plugins/publish/global/submit_publish_job.py +++ b/client/ayon_deadline/plugins/publish/global/submit_publish_job.py @@ -466,7 +466,7 @@ def process(self, instance): # Inject deadline url to instances to query DL for job id for overrides for inst in instances: - inst["deadline"] = instance.data["deadline"] + inst["deadline"] = instance.data["deadline"].copy() inst["deadline"].pop("job_info") # publish job file From 4b9d4875478cb577284e27d7b3dfc387bae8ec71 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 11 Nov 2024 17:49:14 +0100 Subject: [PATCH 114/153] Fix pool collection --- client/ayon_deadline/lib.py | 1 + 1 file changed, 1 insertion(+) diff --git a/client/ayon_deadline/lib.py b/client/ayon_deadline/lib.py index 3700b9bb0c..44c91cd24d 100644 --- a/client/ayon_deadline/lib.py +++ b/client/ayon_deadline/lib.py @@ -555,6 +555,7 @@ def _sanitize(value) -> str: if isinstance(value, str): if value == "none": return None + return value if isinstance(value, list): filtered = [] for val in value: From 
c4243116d290737582662f49806e93a94b9807b4 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 13 Nov 2024 10:55:26 +0100 Subject: [PATCH 115/153] Cleaned up plugin Settings Removed unnecessary plugins without any configuration possible. Disabled submission and collection plugins as it doesn't make sense to have them optional/disabled. --- server/settings/publish_plugins.py | 137 ++--------------------------- 1 file changed, 9 insertions(+), 128 deletions(-) diff --git a/server/settings/publish_plugins.py b/server/settings/publish_plugins.py index 235fe27960..dcc40fb2ed 100644 --- a/server/settings/publish_plugins.py +++ b/server/settings/publish_plugins.py @@ -135,7 +135,6 @@ class CollectJobInfoItem(BaseSettingsModel): class CollectJobInfoModel(BaseSettingsModel): _isGroup = True - enabled: bool = SettingsField(False) profiles: list[CollectJobInfoItem] = SettingsField(default_factory=list) @@ -177,9 +176,6 @@ class ScenePatchesSubmodel(BaseSettingsModel): class MayaSubmitDeadlineModel(BaseSettingsModel): """Maya deadline submitter settings.""" - enabled: bool = SettingsField(title="Enabled") - optional: bool = SettingsField(title="Optional") - active: bool = SettingsField(title="Active") import_reference: bool = SettingsField( title="Use Scene with Imported Reference" ) @@ -204,12 +200,6 @@ def validate_unique_names(cls, value): return value -class MaxSubmitDeadlineModel(BaseSettingsModel): - enabled: bool = SettingsField(True) - optional: bool = SettingsField(title="Optional") - active: bool = SettingsField(title="Active") - - def fusion_deadline_plugin_enum(): """Return a list of value/label dicts for the enumerator. @@ -229,9 +219,6 @@ def fusion_deadline_plugin_enum(): class FusionSubmitDeadlineModel(BaseSettingsModel): - enabled: bool = SettingsField(True, title="Enabled") - optional: bool = SettingsField(False, title="Optional") - active: bool = SettingsField(True, title="Active") concurrent_tasks: int = SettingsField( 1, title="Number of concurrent tasks" ) @@ -243,10 +230,6 @@ class FusionSubmitDeadlineModel(BaseSettingsModel): class NukeSubmitDeadlineModel(BaseSettingsModel): """Nuke deadline submitter settings.""" - enabled: bool = SettingsField(title="Enabled") - optional: bool = SettingsField(title="Optional") - active: bool = SettingsField(title="Active") - use_gpu: bool = SettingsField(True, title="Use GPU") node_class_limit_groups: list[LimitGroupsSubmodel] = SettingsField( default_factory=list, @@ -255,19 +238,8 @@ class NukeSubmitDeadlineModel(BaseSettingsModel): ) -class HarmonySubmitDeadlineModel(BaseSettingsModel): - """Harmony deadline submitter settings.""" - - enabled: bool = SettingsField(title="Enabled") - optional: bool = SettingsField(title="Optional") - active: bool = SettingsField(title="Active") - - class HoudiniSubmitDeadlineModel(BaseSettingsModel): """Houdini deadline render submitter settings.""" - enabled: bool = SettingsField(title="Enabled") - optional: bool = SettingsField(title="Optional") - active: bool = SettingsField(title="Active") export_priority: int = SettingsField(title="Export Priority") export_chunk_size: int = SettingsField(title="Export Chunk Size") @@ -289,33 +261,6 @@ class HoudiniSubmitDeadlineModel(BaseSettingsModel): ) -class HoudiniCacheSubmitDeadlineModel(BaseSettingsModel): - """Houdini deadline cache submitter settings.""" - enabled: bool = SettingsField(title="Enabled") - optional: bool = SettingsField(title="Optional") - active: bool = SettingsField(title="Active") - - -class AfterEffectsSubmitDeadlineModel(BaseSettingsModel): 
- """After Effects deadline submitter settings.""" - - enabled: bool = SettingsField(title="Enabled") - optional: bool = SettingsField(title="Optional") - active: bool = SettingsField(title="Active") - - -class CelactionSubmitDeadlineModel(BaseSettingsModel): - enabled: bool = SettingsField(True, title="Enabled") - optional: bool = SettingsField(title="Optional") - active: bool = SettingsField(title="Active") - - -class BlenderSubmitDeadlineModel(BaseSettingsModel): - enabled: bool = SettingsField(True) - optional: bool = SettingsField(title="Optional") - active: bool = SettingsField(title="Active") - - class AOVFilterSubmodel(BaseSettingsModel): _layout = "expanded" name: str = SettingsField(title="Host") @@ -328,7 +273,6 @@ class AOVFilterSubmodel(BaseSettingsModel): class ProcessCacheJobFarmModel(BaseSettingsModel): """Process submitted job on farm.""" - enabled: bool = SettingsField(title="Enabled") deadline_department: str = SettingsField(title="Department") deadline_pool: str = SettingsField(title="Pool") deadline_group: str = SettingsField(title="Group") @@ -338,7 +282,6 @@ class ProcessCacheJobFarmModel(BaseSettingsModel): class ProcessSubmittedJobOnFarmModel(BaseSettingsModel): """Process submitted job on farm.""" - enabled: bool = SettingsField(title="Enabled") deadline_department: str = SettingsField(title="Department") deadline_pool: str = SettingsField(title="Pool") deadline_group: str = SettingsField(title="Group") @@ -368,7 +311,12 @@ def validate_unique_names(cls, value): class PublishPluginsModel(BaseSettingsModel): CollectJobInfo: CollectJobInfoModel = SettingsField( default_factory=CollectJobInfoModel, - title="Collect JobInfo") + title="Collect JobInfo", + description="Generic plugin collecting Deadline job properties like " + "Pools, Groups etc. It allows atomic control based on " + "Profiles (eg. 
different tasky types might use different " + "Pools etc.)" + ) CollectAYONServerToFarmJob: CollectAYONServerToFarmJobModel = SettingsField( # noqa default_factory=CollectAYONServerToFarmJobModel, title="Add AYON server to farm job", @@ -382,34 +330,12 @@ class PublishPluginsModel(BaseSettingsModel): default_factory=ValidateExpectedFilesModel, title="Validate Expected Files" ) - AfterEffectsSubmitDeadline: AfterEffectsSubmitDeadlineModel = ( - SettingsField( - default_factory=AfterEffectsSubmitDeadlineModel, - title="After Effects to deadline", - section="Hosts" - ) - ) - BlenderSubmitDeadline: BlenderSubmitDeadlineModel = SettingsField( - default_factory=BlenderSubmitDeadlineModel, - title="Blender Submit Deadline") - CelactionSubmitDeadline: CelactionSubmitDeadlineModel = SettingsField( - default_factory=CelactionSubmitDeadlineModel, - title="Celaction Submit Deadline") FusionSubmitDeadline: FusionSubmitDeadlineModel = SettingsField( default_factory=FusionSubmitDeadlineModel, title="Fusion submit to Deadline") - HarmonySubmitDeadline: HarmonySubmitDeadlineModel = SettingsField( - default_factory=HarmonySubmitDeadlineModel, - title="Harmony Submit to deadline") - HoudiniCacheSubmitDeadline: HoudiniCacheSubmitDeadlineModel = SettingsField( - default_factory=HoudiniCacheSubmitDeadlineModel, - title="Houdini Submit cache to deadline") HoudiniSubmitDeadline: HoudiniSubmitDeadlineModel = SettingsField( default_factory=HoudiniSubmitDeadlineModel, title="Houdini Submit render to deadline") - MaxSubmitDeadline: MaxSubmitDeadlineModel = SettingsField( - default_factory=MaxSubmitDeadlineModel, - title="Max Submit to deadline") MayaSubmitDeadline: MayaSubmitDeadlineModel = SettingsField( default_factory=MayaSubmitDeadlineModel, title="Maya Submit to deadline") @@ -426,10 +352,6 @@ class PublishPluginsModel(BaseSettingsModel): DEFAULT_DEADLINE_PLUGINS_SETTINGS = { - "CollectDeadlinePools": { - "primary_pool": "", - "secondary_pool": "" - }, "CollectAYONServerToFarmJob": { "enabled": False }, @@ -444,55 +366,18 @@ class PublishPluginsModel(BaseSettingsModel): "deadline" ] }, - "AfterEffectsSubmitDeadline": { - "enabled": True, - "optional": False, - "active": True, - }, - "BlenderSubmitDeadline": { - "enabled": True, - "optional": False, - "active": True, - }, - "CelactionSubmitDeadline": { - "enabled": True, - "optional": False, - "active": True, - }, "FusionSubmitDeadline": { - "enabled": True, - "optional": False, - "active": True, - }, - "HarmonySubmitDeadline": { - "enabled": True, - "optional": False, - "active": True, - }, - "HoudiniCacheSubmitDeadline": { - "enabled": True, - "optional": False, - "active": True, + "concurrent_tasks": 1, + "plugin": "Fusion" }, "HoudiniSubmitDeadline": { - "enabled": True, - "optional": False, - "active": True, "export_priority": 50, "export_chunk_size": 10, "export_group": "", "export_limits": "", "export_machine_limit": 0 }, - "MaxSubmitDeadline": { - "enabled": True, - "optional": False, - "active": True, - }, "MayaSubmitDeadline": { - "enabled": True, - "optional": False, - "active": True, "tile_assembler_plugin": "DraftTileAssembler", "import_reference": False, "strict_error_checking": True, @@ -500,19 +385,15 @@ class PublishPluginsModel(BaseSettingsModel): "scene_patches": [] }, "NukeSubmitDeadline": { - "enabled": True, - "optional": False, - "active": True, + "use_gpu": True }, "ProcessSubmittedCacheJobOnFarm": { - "enabled": True, "deadline_department": "", "deadline_pool": "", "deadline_group": "", "deadline_priority": 50 }, 
"ProcessSubmittedJobOnFarm": { - "enabled": True, "deadline_department": "", "deadline_pool": "", "deadline_group": "", From 112220e87ce74e820f8c4e1350fb6fa8b867b464 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 13 Nov 2024 10:57:40 +0100 Subject: [PATCH 116/153] Provided basic profile for CollectJobInfo --- server/settings/publish_plugins.py | 28 ++++++++++++++++++++++++++++ 1 file changed, 28 insertions(+) diff --git a/server/settings/publish_plugins.py b/server/settings/publish_plugins.py index dcc40fb2ed..b930d76453 100644 --- a/server/settings/publish_plugins.py +++ b/server/settings/publish_plugins.py @@ -352,6 +352,34 @@ class PublishPluginsModel(BaseSettingsModel): DEFAULT_DEADLINE_PLUGINS_SETTINGS = { + "CollectJobInfo": { + "profiles": [ + { + "group": "", + "priority": 50, + "job_delay": "", + "overrides": [], + "chunk_size": 999, + "department": "", + "host_names": [], + "task_names": [], + "task_types": [], + "limit_groups": [], + "machine_list": [], + "multiprocess": False, + "primary_pool": "", + "machine_limit": 0, + "use_published": True, + "secondary_pool": "", + "concurrent_tasks": 1, + "machine_list_deny": False, + "additional_job_info": "", + "additional_plugin_info": "", + "use_asset_dependencies": True, + "use_workfile_dependency": True + } + ] + }, "CollectAYONServerToFarmJob": { "enabled": False }, From cd81da211d6c70a4198d07857c46c920e717c5ce Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 13 Nov 2024 11:15:53 +0100 Subject: [PATCH 117/153] Added missed default artist overrides --- server/settings/publish_plugins.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/server/settings/publish_plugins.py b/server/settings/publish_plugins.py index b930d76453..c76d1a502c 100644 --- a/server/settings/publish_plugins.py +++ b/server/settings/publish_plugins.py @@ -358,7 +358,14 @@ class PublishPluginsModel(BaseSettingsModel): "group": "", "priority": 50, "job_delay": "", - "overrides": [], + "overrides": [ + "department", + "chunk_size", + "group", + "priority", + "primary_pool", + "secondary_pool" + ], "chunk_size": 999, "department": "", "host_names": [], From 973286647912ce18a1bf342ec3824c643d42cb74 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 13 Nov 2024 14:07:33 +0100 Subject: [PATCH 118/153] Fix wrong access to job_delay default value --- client/ayon_deadline/plugins/publish/global/collect_jobinfo.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py index f404f4db38..7f3357f960 100644 --- a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py +++ b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py @@ -258,7 +258,7 @@ def _get_artist_overrides(cls, overrides, profile): TextDef( "job_delay", label="Delay job", - default=default_values["job_delay"], + default=default_values.get("job_delay"), tooltip="Delay job by specified timecode. 
Format: dd:hh:mm:ss", placeholder="00:00:00:00" ) From 42148a9a347197a4363cc8941e5771babe6a93a7 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 13 Nov 2024 14:11:54 +0100 Subject: [PATCH 119/153] Use list comprehension for pool enum Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> --- .../ayon_deadline/plugins/publish/global/collect_jobinfo.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py index 7f3357f960..3aaaf15cd8 100644 --- a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py +++ b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py @@ -91,8 +91,10 @@ def apply_settings(cls, project_settings): deadline_addon = addons_manager["deadline"] deadline_server_name = settings["deadline_server"] pools = deadline_addon.get_pools_by_server_name(deadline_server_name) - for pool in pools: - cls.pool_enum_values.append({"value": pool, "label": pool}) + cls.pool_enum_values = [ + {"value": pool, "label": pool} + for pool in pools + ] groups = deadline_addon.get_groups_by_server_name(deadline_server_name) for group in groups: From c9d1c4a6b6c4bd9bb013073a0cea14272a0c58a7 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 13 Nov 2024 14:12:39 +0100 Subject: [PATCH 120/153] Use list comprehension for machines enum Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> --- .../plugins/publish/global/collect_jobinfo.py | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py index 3aaaf15cd8..ccf564bddf 100644 --- a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py +++ b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py @@ -108,11 +108,13 @@ def apply_settings(cls, project_settings): cls.limit_group_enum_values.append( {"value": limit_group, "label": limit_group}) - machines = ( - deadline_addon.get_machines_by_server_name(deadline_server_name)) - for machine in machines: - cls.machines_enum_values.append( - {"value": machine, "label": machine}) + machines = deadline_addon.get_machines_by_server_name( + deadline_server_name + ) + cls.machines_enum_values = [ + {"value": machine, "label": machine} + for machine in machines + ] @classmethod def get_attr_defs_for_instance(cls, create_context, instance): From d58ff4d8bc19e363dcf78e718f4e6e46d5a50dab Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 13 Nov 2024 14:13:04 +0100 Subject: [PATCH 121/153] Use list comprehension Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> --- .../plugins/publish/global/collect_jobinfo.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py index ccf564bddf..c4b318c932 100644 --- a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py +++ b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py @@ -193,11 +193,11 @@ def _get_artist_overrides(cls, overrides, profile): filtered.append(value) default_value = filtered if key == "limit_groups": - filtered = [] - for value in default_value: - if value in cls.limit_group_enum_values: - filtered.append(value) - default_value = filtered + default_value = [ + value + for value in default_value + if 
value in cls.limit_group_enum_values + ] if key == "group" and default_value not in cls.group_enum_values: default_value = "" default_values[key] = default_value From 005a5fb23dbee40a5a9b62f36179d615fea34955 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 13 Nov 2024 14:13:28 +0100 Subject: [PATCH 122/153] Fix condition Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> --- client/ayon_deadline/plugins/publish/global/collect_jobinfo.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py index c4b318c932..d71019cfa6 100644 --- a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py +++ b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py @@ -283,7 +283,7 @@ def on_values_changed(cls, event): for instance_change in event["changes"]: instance = instance_change["instance"] #recalculate only if context changes - if not instance_change.get("productName"): + if "productName" not in instance_change: continue if not cls.instance_matches_plugin_families(instance): From deb8aa85afcbdcdb7acc66f22f9fdd9ab85f2ea9 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 13 Nov 2024 14:13:54 +0100 Subject: [PATCH 123/153] Fix copy to deepcopy Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> --- .../ayon_deadline/plugins/publish/global/submit_publish_job.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/client/ayon_deadline/plugins/publish/global/submit_publish_job.py b/client/ayon_deadline/plugins/publish/global/submit_publish_job.py index dd5c429b2d..669043f514 100644 --- a/client/ayon_deadline/plugins/publish/global/submit_publish_job.py +++ b/client/ayon_deadline/plugins/publish/global/submit_publish_job.py @@ -466,7 +466,7 @@ def process(self, instance): # Inject deadline url to instances to query DL for job id for overrides for inst in instances: - inst["deadline"] = instance.data["deadline"].copy() + inst["deadline"] = copy.deepcopy(instance.data["deadline"]) inst["deadline"].pop("job_info") # publish job file From ba26549537bd96fa7e66f585d451e821ce308184 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 13 Nov 2024 14:16:07 +0100 Subject: [PATCH 124/153] Fix formatting --- .../ayon_deadline/plugins/publish/global/submit_publish_job.py | 1 - 1 file changed, 1 deletion(-) diff --git a/client/ayon_deadline/plugins/publish/global/submit_publish_job.py b/client/ayon_deadline/plugins/publish/global/submit_publish_job.py index 669043f514..c0f5ca8b7a 100644 --- a/client/ayon_deadline/plugins/publish/global/submit_publish_job.py +++ b/client/ayon_deadline/plugins/publish/global/submit_publish_job.py @@ -482,7 +482,6 @@ def process(self, instance): "comment": instance.context.data.get("comment"), "job": render_job or None, "instances": instances - } if deadline_publish_job_id: publish_job["deadline_publish_job_id"] = deadline_publish_job_id From faa1cbab37fea1abae91f15e82155b51118ce2a1 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 13 Nov 2024 14:17:26 +0100 Subject: [PATCH 125/153] Use list comprehension for groups --- .../ayon_deadline/plugins/publish/global/collect_jobinfo.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py index d71019cfa6..1158065af9 100644 --- 
a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py +++ b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py @@ -97,8 +97,10 @@ def apply_settings(cls, project_settings): ] groups = deadline_addon.get_groups_by_server_name(deadline_server_name) - for group in groups: - cls.group_enum_values.append({"value": group, "label": group}) + cls.group_enum_values = [ + {"value": group, "label": group} + for group in groups + ] limit_groups = ( deadline_addon.get_limit_groups_by_server_name(deadline_server_name)) From ea9b2d6289f0a5672d3a0f31de8155566884d4a1 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 13 Nov 2024 14:18:45 +0100 Subject: [PATCH 126/153] Use list comprehension for limit groups --- .../plugins/publish/global/collect_jobinfo.py | 17 ++++++++++------- 1 file changed, 10 insertions(+), 7 deletions(-) diff --git a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py index 1158065af9..c5a3918edf 100644 --- a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py +++ b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py @@ -102,13 +102,16 @@ def apply_settings(cls, project_settings): for group in groups ] - limit_groups = ( - deadline_addon.get_limit_groups_by_server_name(deadline_server_name)) - if not limit_groups: - limit_groups.append("none") # enum cannot be empty - for limit_group in limit_groups: - cls.limit_group_enum_values.append( - {"value": limit_group, "label": limit_group}) + limit_groups = deadline_addon.get_limit_groups_by_server_name( + deadline_server_name + ) + limit_group_items = [ + {"value": limit_group, "label": limit_group} + for limit_group in limit_groups + ] + if not limit_group_items: + limit_group_items.append({"value": None, "label": "< none >"}) + cls.group_enum_values = limit_group_items machines = deadline_addon.get_machines_by_server_name( deadline_server_name From a307520fc275a9be74a3f7c487d67dc20dd412da Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 13 Nov 2024 14:21:09 +0100 Subject: [PATCH 127/153] Use list comprehension for machine_limit defaults --- .../plugins/publish/global/collect_jobinfo.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py index c5a3918edf..4af78c8691 100644 --- a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py +++ b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py @@ -192,11 +192,11 @@ def _get_artist_overrides(cls, overrides, profile): for key in overrides: default_value = profile[key] if key == "machine_limit": - filtered = [] - for value in default_value: - if value in cls.machines_enum_values: - filtered.append(value) - default_value = filtered + default_value = [ + value + for value in default_value + if value in cls.machines_enum_values + ] if key == "limit_groups": default_value = [ value From 7e0ad18262de5ecc30fda079e92f9a7310e1de8a Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 13 Nov 2024 14:22:54 +0100 Subject: [PATCH 128/153] Fix wrong assignment --- client/ayon_deadline/plugins/publish/global/collect_jobinfo.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py index 4af78c8691..c3d99cbb8c 100644 --- 
a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py +++ b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py @@ -111,7 +111,7 @@ def apply_settings(cls, project_settings): ] if not limit_group_items: limit_group_items.append({"value": None, "label": "< none >"}) - cls.group_enum_values = limit_group_items + cls.limit_group_enum_values = limit_group_items machines = deadline_addon.get_machines_by_server_name( deadline_server_name From df7a85e8040e528250205342bab68cc25d35306f Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 13 Nov 2024 14:30:28 +0100 Subject: [PATCH 129/153] Fix deepcopy from import --- .../ayon_deadline/plugins/publish/global/submit_publish_job.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/client/ayon_deadline/plugins/publish/global/submit_publish_job.py b/client/ayon_deadline/plugins/publish/global/submit_publish_job.py index c0f5ca8b7a..93e057b3df 100644 --- a/client/ayon_deadline/plugins/publish/global/submit_publish_job.py +++ b/client/ayon_deadline/plugins/publish/global/submit_publish_job.py @@ -466,7 +466,7 @@ def process(self, instance): # Inject deadline url to instances to query DL for job id for overrides for inst in instances: - inst["deadline"] = copy.deepcopy(instance.data["deadline"]) + inst["deadline"] = deepcopy(instance.data["deadline"]) inst["deadline"].pop("job_info") # publish job file From 5c4ebd0888a3d2a55d3c339c866822d607fa87c5 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 13 Nov 2024 14:31:22 +0100 Subject: [PATCH 130/153] Added deadline to instance families That way FARM_FAMILIES could be purged only 'deadline' after all creators would add it. --- client/ayon_deadline/lib.py | 3 ++- client/ayon_deadline/plugins/publish/global/collect_jobinfo.py | 3 +++ 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/client/ayon_deadline/lib.py b/client/ayon_deadline/lib.py index 44c91cd24d..8ef6c236a6 100644 --- a/client/ayon_deadline/lib.py +++ b/client/ayon_deadline/lib.py @@ -17,7 +17,8 @@ "vrayscene", "maxrender", "arnold_rop", "mantra_rop", "karma_rop", "vray_rop", "redshift_rop", - "renderFarm", "usdrender", "publish.hou" + "renderFarm", "usdrender", "publish.hou", + "deadline" ] # Constant defining where we store job environment variables on instance or diff --git a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py index c3d99cbb8c..1e65a7e14d 100644 --- a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py +++ b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py @@ -64,6 +64,9 @@ def process(self, instance): json.loads(attr_values["additional_plugin_info"])) instance.data["deadline"]["plugin_info_data"] = plugin_info_data + if "deadline" not in instance.data["families"]: + instance.data["families"].append("deadline") + def _handle_additional_jobinfo(self,attr_values, job_info): """Adds not explicitly implemented fields by values from Settings.""" additional_job_info = attr_values["additional_job_info"] From c05ddc2d39f340c6d84e7c2876d3cd6118d8be3f Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 14 Nov 2024 11:53:24 +0100 Subject: [PATCH 131/153] Make frames optional Some DCC might not collect that in time Co-authored-by: Mustafa Jafar --- client/ayon_deadline/lib.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/client/ayon_deadline/lib.py b/client/ayon_deadline/lib.py index 8ef6c236a6..2a5d99c433 100644 --- a/client/ayon_deadline/lib.py +++ 
b/client/ayon_deadline/lib.py @@ -521,7 +521,7 @@ def from_dict(cls, data: Dict[str, Any]) -> 'AYONDeadlineJobInfo': "Priority": data["priority"], "MachineLimit": data["machine_limit"], "ConcurrentTasks": data["concurrent_tasks"], - "Frames": data["frames"], + "Frames": data.get("frames", ""), "Group": cls._sanitize(data["group"]), "Pool": cls._sanitize(data["primary_pool"]), "SecondaryPool": cls._sanitize(data["secondary_pool"]), From d3cbe0ba8c4b0519afd206f1c3675e83d262e280 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 15 Nov 2024 11:40:07 +0100 Subject: [PATCH 132/153] Fix updates of job_info between jobs Co-authored-by: Mustafa Jafar --- client/ayon_deadline/abstract_submit_deadline.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/client/ayon_deadline/abstract_submit_deadline.py b/client/ayon_deadline/abstract_submit_deadline.py index e416b3200e..709afd0672 100644 --- a/client/ayon_deadline/abstract_submit_deadline.py +++ b/client/ayon_deadline/abstract_submit_deadline.py @@ -101,7 +101,7 @@ def process(self, instance): assert self._deadline_url, "Requires Deadline Webservice URL" job_info = self.get_generic_job_info(instance) - self.job_info = self.get_job_info(job_info=job_info) + self.job_info = self.get_job_info(job_info=deepcopy(job_info)) self._set_scene_path( context.data["currentFile"], job_info.UsePublished) From 269b943975c4fef13162965fb3dc2e84dd4defcd Mon Sep 17 00:00:00 2001 From: MustafaJafar Date: Fri, 15 Nov 2024 13:23:13 +0200 Subject: [PATCH 133/153] add missing import --- client/ayon_deadline/abstract_submit_deadline.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/client/ayon_deadline/abstract_submit_deadline.py b/client/ayon_deadline/abstract_submit_deadline.py index 709afd0672..3bbe362b6e 100644 --- a/client/ayon_deadline/abstract_submit_deadline.py +++ b/client/ayon_deadline/abstract_submit_deadline.py @@ -9,7 +9,7 @@ import getpass import os import datetime - +from copy import deepcopy import requests From fa9327c7b074a2f2e743a7cc1dbafe7cc3e55992 Mon Sep 17 00:00:00 2001 From: MustafaJafar Date: Fri, 15 Nov 2024 13:25:10 +0200 Subject: [PATCH 134/153] remove unused import --- .../plugins/publish/global/validate_deadline_jobinfo.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/client/ayon_deadline/plugins/publish/global/validate_deadline_jobinfo.py b/client/ayon_deadline/plugins/publish/global/validate_deadline_jobinfo.py index d7db4f69dc..18fffda91e 100644 --- a/client/ayon_deadline/plugins/publish/global/validate_deadline_jobinfo.py +++ b/client/ayon_deadline/plugins/publish/global/validate_deadline_jobinfo.py @@ -4,7 +4,7 @@ PublishValidationError, OptionalPyblishPluginMixin ) -from ayon_deadline.lib import FARM_FAMILIES, get_deadline_pools +from ayon_deadline.lib import FARM_FAMILIES class ValidateDeadlineJobInfo( From 3aa671c815b6a807da7159e35d357d45f679a5c4 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 15 Nov 2024 18:15:39 +0100 Subject: [PATCH 135/153] Fix propagation updated job_info --- client/ayon_deadline/abstract_submit_deadline.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/client/ayon_deadline/abstract_submit_deadline.py b/client/ayon_deadline/abstract_submit_deadline.py index 3bbe362b6e..1c1bf2a533 100644 --- a/client/ayon_deadline/abstract_submit_deadline.py +++ b/client/ayon_deadline/abstract_submit_deadline.py @@ -116,6 +116,8 @@ def process(self, instance): job_id = self.process_submission() self.log.info("Submitted job to Deadline: {}.".format(job_id)) + 
instance.data["deadline"]["job_info"] = deepcopy(self.job_info) + # TODO: Find a way that's more generic and not render type specific if instance.data.get("splitRender"): self.log.info("Splitting export and render in two jobs") @@ -130,6 +132,8 @@ def process(self, instance): auth = instance.data["deadline"]["auth"] verify = instance.data["deadline"]["verify"] render_job_id = self.submit(payload, auth, verify) + + instance.data["deadline"]["job_info"] = deepcopy(render_job_info) self.log.info("Render job id: %s", render_job_id) def _set_scene_path(self, current_file, use_published): From fa016580575b9eedcd7526aabcd2c3cfa3ec5281 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 18 Nov 2024 09:55:01 +0100 Subject: [PATCH 136/153] Fix resetting OutputDirectory for multiCamera Cannot be None as concatenation would be an issue. --- .../ayon_deadline/plugins/publish/max/submit_max_deadline.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/client/ayon_deadline/plugins/publish/max/submit_max_deadline.py b/client/ayon_deadline/plugins/publish/max/submit_max_deadline.py index f2e20322b0..f92b10a14e 100644 --- a/client/ayon_deadline/plugins/publish/max/submit_max_deadline.py +++ b/client/ayon_deadline/plugins/publish/max/submit_max_deadline.py @@ -49,8 +49,8 @@ def get_job_info(self, job_info=None): # do not add expected files for multiCamera if instance.data.get("multiCamera"): - job_info.OutputDirectory = None - job_info.OutputFilename = None + job_info.OutputDirectory.clear() + job_info.OutputFilename.clear() return job_info From 2cea5559ff31c742b5d638cc367a861fdf61df65 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 18 Nov 2024 16:11:02 +0100 Subject: [PATCH 137/153] Fix formatting --- client/ayon_deadline/abstract_submit_deadline.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/client/ayon_deadline/abstract_submit_deadline.py b/client/ayon_deadline/abstract_submit_deadline.py index 1c1bf2a533..719d5fddea 100644 --- a/client/ayon_deadline/abstract_submit_deadline.py +++ b/client/ayon_deadline/abstract_submit_deadline.py @@ -114,7 +114,7 @@ def process(self, instance): self.apply_additional_plugin_info(plugin_info_data) job_id = self.process_submission() - self.log.info("Submitted job to Deadline: {}.".format(job_id)) + self.log.info(f"Submitted job to Deadline: {job_id}.") instance.data["deadline"]["job_info"] = deepcopy(self.job_info) From 6a306ff17d9816a088cda03d8362a98e2bbbfc18 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 18 Nov 2024 16:14:55 +0100 Subject: [PATCH 138/153] Fix dataclass validation --- .../publish/maya/submit_maya_deadline.py | 28 ++++++++++--------- 1 file changed, 15 insertions(+), 13 deletions(-) diff --git a/client/ayon_deadline/plugins/publish/maya/submit_maya_deadline.py b/client/ayon_deadline/plugins/publish/maya/submit_maya_deadline.py index 5ca38a560a..a4fe802a3b 100644 --- a/client/ayon_deadline/plugins/publish/maya/submit_maya_deadline.py +++ b/client/ayon_deadline/plugins/publish/maya/submit_maya_deadline.py @@ -39,17 +39,6 @@ from ayon_deadline import abstract_submit_deadline -def _validate_deadline_bool_value(instance, attribute, value): - if not isinstance(value, (str, bool)): - raise TypeError( - "Attribute {} must be str or bool.".format(attribute)) - if value not in {"1", "0", True, False}: - raise ValueError( - ("Value of {} must be one of " - "'0', '1', True, False").format(attribute) - ) - - @dataclass class MayaPluginInfo(object): SceneFile: str = field(default=None) # Input @@ -61,10 +50,23 @@ 
class MayaPluginInfo(object): Renderer: str = field(default=None) ProjectPath: str = field(default=None) # Resolve relative references # Include all lights flag - RenderSetupIncludeLights: str = field( - default="1", validator=_validate_deadline_bool_value) + RenderSetupIncludeLights: str = field(default="1") StrictErrorChecking: bool = field(default=True) + def __post__init__(self): + self._validate_deadline_bool_value() + + def _validate_deadline_bool_value(self): + if not isinstance(self.RenderSetupIncludeLights, (str, bool)): + raise TypeError( + "Attribute 'RenderSetupIncludeLights' must be str or bool." + ) + if self.RenderSetupIncludeLights not in {"1", "0", True, False}: + raise ValueError( + ("Value of {} must be one of " + "'0', '1', True, False").format("RenderSetupIncludeLights") + ) + @dataclass class PythonPluginInfo(object): From 0d0afe2f39294812e5ed9335db43cfece14e7a78 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 18 Nov 2024 16:22:15 +0100 Subject: [PATCH 139/153] Fix dataclass validation --- .../plugins/publish/maya/submit_maya_deadline.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/client/ayon_deadline/plugins/publish/maya/submit_maya_deadline.py b/client/ayon_deadline/plugins/publish/maya/submit_maya_deadline.py index a4fe802a3b..38439ce237 100644 --- a/client/ayon_deadline/plugins/publish/maya/submit_maya_deadline.py +++ b/client/ayon_deadline/plugins/publish/maya/submit_maya_deadline.py @@ -63,8 +63,8 @@ def _validate_deadline_bool_value(self): ) if self.RenderSetupIncludeLights not in {"1", "0", True, False}: raise ValueError( - ("Value of {} must be one of " - "'0', '1', True, False").format("RenderSetupIncludeLights") + "Value of 'RenderSetupIncludeLights' must be one of " + "'0', '1', True, False" ) From 8e47a56a3471dc4500bd31977521d8ba0a19f607 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 19 Nov 2024 11:48:50 +0100 Subject: [PATCH 140/153] Formatting change Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> --- server/settings/publish_plugins.py | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/server/settings/publish_plugins.py b/server/settings/publish_plugins.py index c76d1a502c..ad982344d0 100644 --- a/server/settings/publish_plugins.py +++ b/server/settings/publish_plugins.py @@ -30,16 +30,16 @@ class CollectAYONServerToFarmJobModel(BaseSettingsModel): def extract_jobinfo_overrides_enum(): """Enum of fields that could be overridden by artist in Publisher UI""" return [ - {"label": "Department", "value": "department"}, - {"label": "Delay job (timecode dd:hh:mm:ss)", "value": "job_delay"}, - {"label": "Frames per Task", "value": "chunk_size"}, - {"label": "Group", "value": "group"}, - {"label": "Priority", "value": "priority"}, - {"label": "Limit groups", "value": "limit_groups"}, - {"label": "Primary pool", "value": "primary_pool"}, - {"label": "Secondary pool", "value": "secondary_pool"}, - {"label": "Machine List", "value": "machine_list"}, - {"label": "Machine List is a Deny", "value": "machine_list_deny"}, + {"value": department, "label": "Department"}, + {"value": job_delay, "label": "Delay job (timecode dd:hh:mm:ss)"}, + {"value": chunk_size, "label": "Frames per Task"}, + {"value": group, "label": "Group"}, + {"value": priority, "label": "Priority"}, + {"value": limit_groups, "label": "Limit groups"}, + {"value": primary_pool, "label": "Primary pool"}, + {"value": secondary_pool, "label": "Secondary pool"}, + {"value": machine_list, "label": "Machine 
List"}, + {"value": machine_list_deny, "label": "Machine List is a Deny"}, ] From 4628fa69b3094431305ff1c1cece775917b2be08 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 19 Nov 2024 11:49:58 +0100 Subject: [PATCH 141/153] elif update Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> --- client/ayon_deadline/plugins/publish/global/collect_jobinfo.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py index 1e65a7e14d..66f969c2c5 100644 --- a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py +++ b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py @@ -200,7 +200,7 @@ def _get_artist_overrides(cls, overrides, profile): for value in default_value if value in cls.machines_enum_values ] - if key == "limit_groups": + elif key == "limit_groups": default_value = [ value for value in default_value From 13d7cf5223c2d460665cd365cf4d2eb34e7d1d4e Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 19 Nov 2024 11:55:16 +0100 Subject: [PATCH 142/153] Fix checking for default value --- .../plugins/publish/global/collect_jobinfo.py | 24 +++++++++++++++---- 1 file changed, 19 insertions(+), 5 deletions(-) diff --git a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py index 66f969c2c5..98231e7c0d 100644 --- a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py +++ b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py @@ -193,21 +193,35 @@ def _get_artist_overrides(cls, overrides, profile): # should be matching to extract_jobinfo_overrides_enum default_values = {} for key in overrides: - default_value = profile[key] if key == "machine_limit": + available_values = { + item["value"] + for item in cls.machines_enum_values + } default_value = [ value for value in default_value - if value in cls.machines_enum_values + if value in available_values ] elif key == "limit_groups": + available_values = { + item["value"] + for item in cls.limit_group_enum_values + } default_value = [ value for value in default_value - if value in cls.limit_group_enum_values + if value in available_values ] - if key == "group" and default_value not in cls.group_enum_values: - default_value = "" + elif key == "group": + available_values = [ + item["value"] + for item in cls.group_enum_values + ] + if not available_values: + default_value = None + elif default_value not in available_values: + default_value = available_values[0] default_values[key] = default_value attr_defs = [ From 25c1e7fb3ddbc5d86fe00d2ddb5ff560f24045c7 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 19 Nov 2024 11:59:04 +0100 Subject: [PATCH 143/153] Fix extending host specific definitions --- .../plugins/publish/global/collect_jobinfo.py | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py index 98231e7c0d..6518c53d40 100644 --- a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py +++ b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py @@ -179,7 +179,7 @@ def get_attr_defs_for_instance(cls, create_context, instance): ) ) - defs = cls._host_specific_attr_defs(create_context, instance, defs) + defs.extend(cls._host_specific_attr_defs(create_context, instance)) defs.append( 
UISeparatorDef("deadline_defs_end") @@ -346,11 +346,10 @@ def _get_jobinfo_defaults(self, instance): return profile or {} @classmethod - def _host_specific_attr_defs(cls, create_context, instance, defs): - + def _host_specific_attr_defs(cls, create_context, instance): host_name = create_context.host_name if host_name == "maya": - defs.extend([ + return [ NumberDef( "tile_priority", label="Tile Assembler Priority", @@ -360,6 +359,6 @@ def _host_specific_attr_defs(cls, create_context, instance, defs): "strict_error_checking", label="Strict Error Checking", ), - ]) + ] - return defs + return [] From 1445df7edcb4f261c6e691253f93594746456421 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 19 Nov 2024 12:00:18 +0100 Subject: [PATCH 144/153] Fix farm check optionality --- .../plugins/publish/fusion/submit_fusion_deadline.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/client/ayon_deadline/plugins/publish/fusion/submit_fusion_deadline.py b/client/ayon_deadline/plugins/publish/fusion/submit_fusion_deadline.py index b925bd1339..9fe9f803ad 100644 --- a/client/ayon_deadline/plugins/publish/fusion/submit_fusion_deadline.py +++ b/client/ayon_deadline/plugins/publish/fusion/submit_fusion_deadline.py @@ -41,7 +41,7 @@ class FusionSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline, plugin = None def process(self, instance): - if not instance.data["farm"]: + if not instance.data.get("farm"): self.log.debug("Render on farm is disabled. " "Skipping deadline submission.") return From 5287ab50e38e84641a87cc84faaa76b6c787643d Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 19 Nov 2024 12:11:59 +0100 Subject: [PATCH 145/153] Fix default value --- client/ayon_deadline/plugins/publish/global/collect_jobinfo.py | 1 + 1 file changed, 1 insertion(+) diff --git a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py index 6518c53d40..9b6c7275e7 100644 --- a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py +++ b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py @@ -193,6 +193,7 @@ def _get_artist_overrides(cls, overrides, profile): # should be matching to extract_jobinfo_overrides_enum default_values = {} for key in overrides: + default_value = profile[key] if key == "machine_limit": available_values = { item["value"] From 564d9936803305ed85d0925a85dcfc7f3fdb792a Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 19 Nov 2024 12:17:59 +0100 Subject: [PATCH 146/153] Made condition clearer productName might not change, this condition is clear --- .../ayon_deadline/plugins/publish/global/collect_jobinfo.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py index 9b6c7275e7..f7c08bfed9 100644 --- a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py +++ b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py @@ -306,7 +306,10 @@ def on_values_changed(cls, event): for instance_change in event["changes"]: instance = instance_change["instance"] #recalculate only if context changes - if "productName" not in instance_change: + if ( + "task" not in instance_change + and "folderPath" not in instance_change + ): continue if not cls.instance_matches_plugin_families(instance): From c1d31f3cdcf46b15dff6e0df1fe923c43c9943bd Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 20 Nov 2024 12:17:37 +0100 Subject: [PATCH 147/153] 
Formatting change Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> --- .../plugins/publish/houdini/submit_houdini_cache_deadline.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/client/ayon_deadline/plugins/publish/houdini/submit_houdini_cache_deadline.py b/client/ayon_deadline/plugins/publish/houdini/submit_houdini_cache_deadline.py index 3dafeed546..86e9e97b2c 100644 --- a/client/ayon_deadline/plugins/publish/houdini/submit_houdini_cache_deadline.py +++ b/client/ayon_deadline/plugins/publish/houdini/submit_houdini_cache_deadline.py @@ -53,8 +53,7 @@ def get_job_info(self, job_info=None): scenename = os.path.basename(filepath) job_name = "{scene} - {instance} [PUBLISH]".format( scene=scenename, instance=instance.name) - batch_name = "{code} - {scene}".format( - code=project_name, scene=scenename) + batch_name = f"{project_name} - {scenename}" if is_in_tests(): batch_name += datetime.now().strftime("%d%m%Y%H%M%S") From 305bf13c134eab89805696ad8965fff6f802d9a6 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 20 Nov 2024 12:18:03 +0100 Subject: [PATCH 148/153] Formatting change Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> --- client/ayon_deadline/plugins/publish/global/collect_jobinfo.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py index f7c08bfed9..6ac544a495 100644 --- a/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py +++ b/client/ayon_deadline/plugins/publish/global/collect_jobinfo.py @@ -135,7 +135,7 @@ def get_attr_defs_for_instance(cls, create_context, instance): Returns: (list) """ - if instance.product_type not in cls.families: + if not cls.instance_matches_plugin_families(instance): return [] host_name = create_context.host_name From ac82c139245ea0f95b6fa09ea9d9839ae835d302 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 21 Nov 2024 10:53:59 +0100 Subject: [PATCH 149/153] Fix wrong values in Settings --- server/settings/publish_plugins.py | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/server/settings/publish_plugins.py b/server/settings/publish_plugins.py index ad982344d0..73ec7909ce 100644 --- a/server/settings/publish_plugins.py +++ b/server/settings/publish_plugins.py @@ -30,16 +30,16 @@ class CollectAYONServerToFarmJobModel(BaseSettingsModel): def extract_jobinfo_overrides_enum(): """Enum of fields that could be overridden by artist in Publisher UI""" return [ - {"value": department, "label": "Department"}, - {"value": job_delay, "label": "Delay job (timecode dd:hh:mm:ss)"}, - {"value": chunk_size, "label": "Frames per Task"}, - {"value": group, "label": "Group"}, - {"value": priority, "label": "Priority"}, - {"value": limit_groups, "label": "Limit groups"}, - {"value": primary_pool, "label": "Primary pool"}, - {"value": secondary_pool, "label": "Secondary pool"}, - {"value": machine_list, "label": "Machine List"}, - {"value": machine_list_deny, "label": "Machine List is a Deny"}, + {"value": "department", "label": "Department"}, + {"value": "job_delay", "label": "Delay job (timecode dd:hh:mm:ss)"}, + {"value": "chunk_size", "label": "Frames per Task"}, + {"value": "group", "label": "Group"}, + {"value": "priority", "label": "Priority"}, + {"value": "limit_groups", "label": "Limit groups"}, + {"value": "primary_pool", "label": "Primary pool"}, + {"value": "secondary_pool", "label": "Secondary 
pool"}, + {"value": "machine_list", "label": "Machine List"}, + {"value": "machine_list_deny", "label": "Machine List is a Deny"}, ] From 84eafba1be3f7f32b3c3035cfbf10ed356aaedae Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 21 Nov 2024 12:26:44 +0100 Subject: [PATCH 150/153] Fix filtering on family for getting DL env vars Doesnt make much sense on ContextPlugin and it is not working on Pyblish --- .../plugins/publish/global/collect_deadline_job_env_vars.py | 1 - 1 file changed, 1 deletion(-) diff --git a/client/ayon_deadline/plugins/publish/global/collect_deadline_job_env_vars.py b/client/ayon_deadline/plugins/publish/global/collect_deadline_job_env_vars.py index f729b76e9a..392d54dd99 100644 --- a/client/ayon_deadline/plugins/publish/global/collect_deadline_job_env_vars.py +++ b/client/ayon_deadline/plugins/publish/global/collect_deadline_job_env_vars.py @@ -9,7 +9,6 @@ class CollectDeadlineJobEnvVars(pyblish.api.ContextPlugin): """Collect set of environment variables to submit with deadline jobs""" order = pyblish.api.CollectorOrder label = "Deadline Farm Environment Variables" - families = FARM_FAMILIES targets = ["local"] ENV_KEYS = [ From 4b3b32763c48e91414153a12533159ad788222d7 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 21 Nov 2024 15:05:25 +0100 Subject: [PATCH 151/153] Bump ayon dependency Should be bumped to release when available --- package.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.py b/package.py index 7a1eb3679c..cdbe7dbe88 100644 --- a/package.py +++ b/package.py @@ -5,6 +5,6 @@ client_dir = "ayon_deadline" ayon_required_addons = { - "core": ">0.3.2", + "core": ">1.0.9+dev", } ayon_compatible_addons = {} From 49a7fe43a7c9dca7837fa8e5145fb0b95eea2046 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 22 Nov 2024 11:12:19 +0100 Subject: [PATCH 152/153] concurrent_tasks moved to generic settings --- server/settings/publish_plugins.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/server/settings/publish_plugins.py b/server/settings/publish_plugins.py index 73ec7909ce..eef5a3a79d 100644 --- a/server/settings/publish_plugins.py +++ b/server/settings/publish_plugins.py @@ -219,9 +219,6 @@ def fusion_deadline_plugin_enum(): class FusionSubmitDeadlineModel(BaseSettingsModel): - concurrent_tasks: int = SettingsField( - 1, title="Number of concurrent tasks" - ) plugin: str = SettingsField("Fusion", enum_resolver=fusion_deadline_plugin_enum, title="Deadline Plugin") @@ -402,7 +399,6 @@ class PublishPluginsModel(BaseSettingsModel): ] }, "FusionSubmitDeadline": { - "concurrent_tasks": 1, "plugin": "Fusion" }, "HoudiniSubmitDeadline": { From 0defb2f9aff6355b79185b60686635cbe6aefa8f Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 22 Nov 2024 12:17:45 +0100 Subject: [PATCH 153/153] Removed unused import --- .../plugins/publish/global/collect_deadline_job_env_vars.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/client/ayon_deadline/plugins/publish/global/collect_deadline_job_env_vars.py b/client/ayon_deadline/plugins/publish/global/collect_deadline_job_env_vars.py index 392d54dd99..35e8d7bee1 100644 --- a/client/ayon_deadline/plugins/publish/global/collect_deadline_job_env_vars.py +++ b/client/ayon_deadline/plugins/publish/global/collect_deadline_job_env_vars.py @@ -2,7 +2,7 @@ import pyblish.api -from ayon_deadline.lib import FARM_FAMILIES, JOB_ENV_DATA_KEY +from ayon_deadline.lib import JOB_ENV_DATA_KEY class CollectDeadlineJobEnvVars(pyblish.api.ContextPlugin):