diff --git a/client/ayon_deadline/plugins/publish/global/collect_pools.py b/client/ayon_deadline/plugins/publish/global/collect_pools.py index b2b6bc60d4..6de68502f1 100644 --- a/client/ayon_deadline/plugins/publish/global/collect_pools.py +++ b/client/ayon_deadline/plugins/publish/global/collect_pools.py @@ -36,6 +36,7 @@ class CollectDeadlinePools(pyblish.api.InstancePlugin, "max", "houdini", "nuke", + "unreal" ] families = FARM_FAMILIES diff --git a/client/ayon_deadline/plugins/publish/global/collect_user_credentials.py b/client/ayon_deadline/plugins/publish/global/collect_user_credentials.py index 1c59c178d3..fa59139c00 100644 --- a/client/ayon_deadline/plugins/publish/global/collect_user_credentials.py +++ b/client/ayon_deadline/plugins/publish/global/collect_user_credentials.py @@ -35,7 +35,8 @@ class CollectDeadlineUserCredentials(pyblish.api.InstancePlugin): "nuke", "maya", "max", - "houdini"] + "houdini", + "unreal"] families = FARM_FAMILIES diff --git a/client/ayon_deadline/plugins/publish/global/submit_publish_job.py b/client/ayon_deadline/plugins/publish/global/submit_publish_job.py index 643dcc1c46..d773341e70 100644 --- a/client/ayon_deadline/plugins/publish/global/submit_publish_job.py +++ b/client/ayon_deadline/plugins/publish/global/submit_publish_job.py @@ -86,7 +86,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin, targets = ["local"] hosts = ["fusion", "max", "maya", "nuke", "houdini", - "celaction", "aftereffects", "harmony", "blender"] + "celaction", "aftereffects", "harmony", "blender", "unreal"] families = ["render", "render.farm", "render.frames_farm", "prerender", "prerender.farm", "prerender.frames_farm", diff --git a/client/ayon_deadline/plugins/publish/unreal/submit_unreal_deadline.py b/client/ayon_deadline/plugins/publish/unreal/submit_unreal_deadline.py new file mode 100644 index 0000000000..cabb764437 --- /dev/null +++ b/client/ayon_deadline/plugins/publish/unreal/submit_unreal_deadline.py @@ -0,0 +1,236 @@ +import os 
+import attr +import getpass +import pyblish.api +from datetime import datetime +from pathlib import Path + +from ayon_core.lib import is_in_tests + +from ayon_deadline import abstract_submit_deadline +from ayon_deadline.abstract_submit_deadline import DeadlineJobInfo + + +@attr.s +class DeadlinePluginInfo(): + ProjectFile = attr.ib(default=None) + EditorExecutableName = attr.ib(default=None) + EngineVersion = attr.ib(default=None) + CommandLineMode = attr.ib(default=True) + OutputFilePath = attr.ib(default=None) + Output = attr.ib(default=None) + StartupDirectory = attr.ib(default=None) + CommandLineArguments = attr.ib(default=None) + MultiProcess = attr.ib(default=None) + PerforceStream = attr.ib(default=None) + PerforceChangelist = attr.ib(default=None) + PerforceGamePath = attr.ib(default=None) + + +class UnrealSubmitDeadline( + abstract_submit_deadline.AbstractSubmitDeadline +): + """Supports direct rendering of prepared Unreal project on Deadline + (`render` product must be created with flag for Farm publishing) OR + Perforce assisted rendering. + + For this Ayon server must contain `ayon-version-control` addon and provide + configuration for it (P4 credentials etc.)! 
+ """ + + label = "Submit Unreal to Deadline" + order = pyblish.api.IntegratorOrder + 0.1 + hosts = ["unreal"] + families = ["render.farm"] # cannot be "render' as that is integrated + use_published = True + targets = ["local"] + + priority = 50 + chunk_size = 1000000 + group = None + department = None + multiprocess = True + + def get_job_info(self): + dln_job_info = DeadlineJobInfo(Plugin="UnrealEngine5") + + context = self._instance.context + + batch_name = self._get_batch_name() + dln_job_info.Name = self._instance.data["name"] + dln_job_info.BatchName = batch_name + dln_job_info.Plugin = "UnrealEngine5" + dln_job_info.UserName = context.data.get( + "deadlineUser", getpass.getuser()) + if self._instance.data["frameEnd"] > self._instance.data["frameStart"]: + # Deadline requires integers in frame range + frame_range = "{}-{}".format( + int(round(self._instance.data["frameStart"])), + int(round(self._instance.data["frameEnd"]))) + dln_job_info.Frames = frame_range + + dln_job_info.Priority = self.priority + dln_job_info.Pool = self._instance.data.get("primaryPool") + dln_job_info.SecondaryPool = self._instance.data.get("secondaryPool") + dln_job_info.Group = self.group + dln_job_info.Department = self.department + dln_job_info.ChunkSize = self.chunk_size + dln_job_info.OutputFilename += \ + os.path.basename(self._instance.data["file_names"][0]) + dln_job_info.OutputDirectory += \ + os.path.dirname(self._instance.data["expectedFiles"][0]) + dln_job_info.JobDelay = "00:00:00" + + keys = [ + "FTRACK_API_KEY", + "FTRACK_API_USER", + "FTRACK_SERVER", + "AYON_PROJECT_NAME", + "AYON_FOLDER_PATH", + "AYON_TASK_NAME", + "AYON_WORKDIR", + "AYON_APP_NAME", + "AYON_LOG_NO_COLORS", + "IS_TEST", + ] + + environment = { + key: os.environ[key] + for key in keys + if key in os.environ + } + for key in keys: + value = environment.get(key) + if value: + dln_job_info.EnvironmentKeyValue[key] = value + + dln_job_info.EnvironmentKeyValue["AYON_UNREAL_VERSION"] = ( + 
self._instance.data)["app_version"] + +        # to recognize render jobs +        dln_job_info.add_render_job_env_var() + +        return dln_job_info + +    def get_plugin_info(self): +        deadline_plugin_info = DeadlinePluginInfo() + +        render_path = self._instance.data["expectedFiles"][0] +        self._instance.data["outputDir"] = os.path.dirname(render_path) +        self._instance.context.data["version"] = 1  #TODO + +        render_dir = os.path.dirname(render_path) +        file_name = self._instance.data["file_names"][0] +        render_path = os.path.join(render_dir, file_name) + +        deadline_plugin_info.ProjectFile = self.scene_path +        deadline_plugin_info.Output = render_path.replace("\\", "/") + +        deadline_plugin_info.EditorExecutableName = "UnrealEditor-Cmd.exe"  # parse ayon+settings://applications/applications/unreal/variants/3/environmen +        deadline_plugin_info.EngineVersion = self._instance.data["app_version"] +        master_level = self._instance.data["master_level"] +        render_queue_path = self._instance.data["render_queue_path"] +        cmd_args = [f"{master_level} -game ", +                    f"-MoviePipelineConfig={render_queue_path}"] +        cmd_args.extend([ +            "-windowed", +            "-Log", +            "-StdOut", +            "-allowStdOutLogVerbosity", +            "-Unattended" +        ]) +        self.log.debug(f"cmd-args::{cmd_args}") +        deadline_plugin_info.CommandLineArguments = " ".join(cmd_args) + +        # if Perforce - triggered by active `changelist_metadata` instance!! +        collected_version_control = self._get_version_control() +        if collected_version_control: +            version_control_data = self._instance.context.data[ +                "version_control"] +            workspace_dir = version_control_data["workspace_dir"] +            stream = version_control_data["stream"] +            self._update_version_control_data( +                self.scene_path, +                workspace_dir, +                stream, +                collected_version_control["change_info"]["change"], +                deadline_plugin_info +            ) + +        return attr.asdict(deadline_plugin_info) + +    def from_published_scene(self): +        """ Do not overwrite expected files. 
+ + Use published is set to True, so rendering will be triggered + from published scene (in 'publish' folder). Default implementation + of abstract class renames expected (eg. rendered) files accordingly + which is not needed here. + """ + return super().from_published_scene(False) + + def _get_batch_name(self): + """Returns value that differentiate jobs in DL. + + For automatic tests it adds timestamp, for Perforce driven change list + """ + batch_name = os.path.basename(self._instance.data["source"]) + if is_in_tests(): + batch_name += datetime.now().strftime("%d%m%Y%H%M%S") + collected_version_control = self._get_version_control() + if collected_version_control: + change = (collected_version_control["change_info"] + ["change"]) + batch_name = f"{batch_name}_{change}" + return batch_name + + def _get_version_control(self): + """Look if changelist_metadata is published to get change list info. + + Context version_control contains universal connection info, instance + version_control contains detail about change list. + """ + change_list_version = {} + for inst in self._instance.context: + # get change info from `changelist_metadata` instance + change_list_version = inst.data.get("version_control") + if change_list_version: + context_version = ( + self._instance.context.data["version_control"]) + change_list_version.update(context_version) + break + return change_list_version + + def _update_version_control_data( + self, + scene_path, + workspace_dir, + stream, + change_list_id, + deadline_plugin_info + ): + """Adds Perforce metadata which causes DL pre job to sync to change. + + It triggers only in presence of activated `changelist_metadata` instance, + which materialize info about commit. Artists could return to any + published commit and re-render if they choose. + `changelist_metadata` replaces `workfile` as there are no versioned Unreal + projects (because of size). 
+ """ + # normalize paths, c:/ vs C:/ + scene_path = str(Path(scene_path).resolve()) + workspace_dir = str(Path(workspace_dir).resolve()) + + unreal_project_file_name = os.path.basename(scene_path) + + unreal_project_hierarchy = self.scene_path.replace(workspace_dir, "") + unreal_project_hierarchy = ( + unreal_project_hierarchy.replace(unreal_project_file_name, "")) + # relative path from workspace dir to last folder + unreal_project_hierarchy = unreal_project_hierarchy.strip("\\") + + deadline_plugin_info.ProjectFile = unreal_project_file_name + + deadline_plugin_info.PerforceStream = stream + deadline_plugin_info.PerforceChangelist = change_list_id + deadline_plugin_info.PerforceGamePath = unreal_project_hierarchy diff --git a/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/DeadlineRPC.py b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/DeadlineRPC.py new file mode 100644 index 0000000000..500fa32eec --- /dev/null +++ b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/DeadlineRPC.py @@ -0,0 +1,319 @@ +# Copyright Epic Games, Inc. All Rights Reserved + +from ue_utils.rpc.server import RPCServerThread +from ue_utils.rpc.base_server import BaseRPCServerManager + +from Deadline.Scripting import RepositoryUtils + + +class BaseDeadlineRPCJobManager: + """ + This is a base class for exposing commonly used deadline function on RPC + """ + + def __init__(self): + """ + Constructor + """ + # get the instance of the deadline plugin from the python globals + self._deadline_plugin = self.__get_instance_from_globals() + + # Get the current running job + self._job = self._deadline_plugin.GetJob() + self._is_connected = False + + # Track all completed tasks + self._completed_tasks = set() + + def connect(self): + """ + First mode of contact to the rpc server. It is very critical the + client calls this function first as it will let the Deadline process + know a client has connected and to wait on the task to complete. 
+ Else, Deadline will assume the connection was never made and requeue + the job after a few minutes + :return: bool representing the connection + """ + self._is_connected = True + print("Server connection established!") + return self._is_connected + + def is_connected(self): + """ + Returns the connection status to a client + :return: + """ + return self._is_connected + + def is_task_complete(self, task_id): + """ + Checks and returns if a task has been marked as complete + :param task_id: job task id + :return: return True/False if the task id is present + """ + return task_id in self._completed_tasks + + @staticmethod + def __get_instance_from_globals(): + """ + Get the instance of the Deadline plugin from the python globals. + Since this class is executed in a thread, this was the best method to + get the plugin instance to the class without pass it though several + layers of abstraction + :return: + """ + import __main__ + + try: + return __main__.__deadline_plugin_instance__ + except AttributeError as err: + raise RuntimeError( + f"Could not get deadline plugin instance from globals. " + f"\n\tError: {err}" + ) + + def get_job_id(self): + """ + Returns the current JobID + :return: Job ID + """ + return self._job.JobId + + def get_task_frames(self): + """ + Returns the frames rendered by ths task + :return: + """ + return [ + self._deadline_plugin.GetStartFrame(), + self._deadline_plugin.GetEndFrame() + ] + + def get_job_extra_info_key_value(self, name): + """ + Returns the value of a key in the job extra info property + :param name: Extra Info Key + :return: Returns Extra Info Value + """ + # This function is probably the most important function in the class. + # This allows you to store different types of data and retrieve the + # data from the other side. 
This is what makes the Unreal plugin a bit + # more feature/task agnostic + return self._job.GetJobExtraInfoKeyValue(name) + + def fail_render(self, message): + """ + Fail a render job with a message + :param message: Failure message + """ + self._deadline_plugin.FailRender(message.strip("\n")) + return True + + def set_status_message(self, message): + """ + Sets the message on the job status + :param message: Status Message + """ + self._deadline_plugin.SetStatusMessage(message) + return True + + def set_progress(self, progress): + """ + Sets the job progress + :param progress: job progress + """ + self._deadline_plugin.SetProgress(progress) + return True + + def log_warning(self, message): + """ + Logs a warning message + :param message: Log message + """ + self._deadline_plugin.LogWarning(message) + return True + + def log_info(self, message): + """ + Logs an informational message + :param message: Log message + """ + self._deadline_plugin.LogInfo(message) + return True + + def get_task_id(self): + """ + Returns the current Task ID + :return: + """ + return self._deadline_plugin.GetCurrentTaskId() + + def get_job_user(self): + """ + Return the job user + :return: + """ + return self._job.JobUserName + + def complete_task(self, task_id): + """ + Marks a task as complete. This function should be called when a task + is complete. This will allow the Deadline render taskl process to end + and get the next render task. 
If this is not called, deadline will +        render the task indefinitely +        :param task_id: Task ID to mark as complete +        :return: +        """ +        self._completed_tasks.add(task_id) +        return True + +    def update_job_output_filenames(self, filenames): +        """ +        Updates the file names for the current job +        :param list filenames: list of filenames +        """ +        if not isinstance(filenames, list): +            filenames = list(filenames) + +        self._deadline_plugin.LogInfo( +            "Setting job filenames: {filenames}".format( +                filenames=", ".join(filenames) +            ) +        ) + +        # Set the file names on the job +        RepositoryUtils.UpdateJobOutputFileNames(self._job, filenames) + +        # Make sure to save the settings just in case +        RepositoryUtils.SaveJob(self._job) + +    def update_job_output_directories(self, directories): +        """ +        Updates the output directories on job +        :param list directories: List of directories +        """ +        if not isinstance(directories, list): +            directories = list(directories) + +        self._deadline_plugin.LogInfo( +            "Setting job directories: {directories}".format( +                directories=", ".join(directories) +            ) +        ) + +        # Set the directory on the job +        RepositoryUtils.SetJobOutputDirectories(self._job, directories) + +        # Make sure to save the settings just in case +        RepositoryUtils.SaveJob(self._job) + +    def check_path_mappings(self, paths): +        """ +        Resolves any path mappings set on input path +        :param [str] paths: Path string with tokens +        :return: Resolved path mappings +        """ +        if not isinstance(paths, list): +            paths = list(paths) + +        # Deadline returns a System.String[] object here. 
Convert to a proper + # list + path_mapped_strings = RepositoryUtils.CheckPathMappingForMultiplePaths( + paths, + forceSeparator="/", + verbose=False + ) + + return [str(path) for path in path_mapped_strings] + + +class DeadlineRPCServerThread(RPCServerThread): + """ + Deadline server thread + """ + + deadline_job_manager = None + + def __init__(self, name, port): + super(DeadlineRPCServerThread, self).__init__(name, port) + if self.deadline_job_manager: + self.deadline_job_manager = self.deadline_job_manager() + else: + self.deadline_job_manager = BaseDeadlineRPCJobManager() + + # Register our instance on the server + self.server.register_instance( + self.deadline_job_manager, + allow_dotted_names=True + ) + + +class DeadlineRPCServerManager(BaseRPCServerManager): + """ + RPC server manager class. This class is responsible for registering a + server thread class and starting the thread. This can be a blocking or + non-blocking thread + """ + + def __init__(self, deadline_plugin, port): + super(DeadlineRPCServerManager, self).__init__() + self.name = "DeadlineRPCServer" + self.port = port + self.is_started = False + self.__make_plugin_instance_global(deadline_plugin) + + @staticmethod + def __make_plugin_instance_global(deadline_plugin_instance): + """ + Puts an instance of the deadline plugin in the python globals. 
This + allows the server thread to get the plugin instance without having + the instance passthrough abstraction layers + :param deadline_plugin_instance: Deadline plugin instance + :return: + """ + import __main__ + + if not hasattr(__main__, "__deadline_plugin_instance__"): + __main__.__deadline_plugin_instance__ = None + + __main__.__deadline_plugin_instance__ = deadline_plugin_instance + + def start(self, threaded=True): + """ + Starts the server thread + :param threaded: Run as threaded or blocking + :return: + """ + super(DeadlineRPCServerManager, self).start(threaded=threaded) + self.is_started = True + + def client_connected(self): + """ + Check if there is a client connected + :return: + """ + if self.server_thread: + return self.server_thread.deadline_job_manager.is_connected() + return False + + def get_temporary_client_proxy(self): + """ + This returns client proxy and is not necessarily expected to be used + for server communication but for mostly queries. + NOTE: This behavior is implied + :return: RPC client proxy + """ + from ue_utils.rpc.client import RPCClient + + # Get the port the server is using + server = self.get_server() + _, server_port = server.socket.getsockname() + return RPCClient(port=int(server_port)).proxy + + def shutdown(self): + """ + Stops the server and shuts down the thread + :return: + """ + super(DeadlineRPCServerManager, self).shutdown() + self.is_started = False diff --git a/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/JobPreLoad.py b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/JobPreLoad.py new file mode 100644 index 0000000000..4f47a9aae7 --- /dev/null +++ b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/JobPreLoad.py @@ -0,0 +1,207 @@ +# Copyright Epic Games, Inc. All Rights Reserved + +from Deadline.Scripting import * +import UnrealSyncUtil +import os +from Deadline.Scripting import FileUtils + + +# This is executed on the Slave prior to it attempting to execute a task. 
+# We use this to sync to the specified changelist and build the project +def __main__( deadlinePlugin ): + # + # Retrieve the settings from the job so we know which branch/stream/target this is. + # + stream = deadlinePlugin.GetPluginInfoEntry("PerforceStream") + if not stream: + print("Perforce info not collected, skipping!") + return + changelist = int(deadlinePlugin.GetPluginInfoEntryWithDefault("PerforceChangelist", "0")) + gamePath = deadlinePlugin.GetPluginInfoEntry("PerforceGamePath") + projectFile = deadlinePlugin.GetPluginInfoEntry("ProjectFile") + editorName = deadlinePlugin.GetPluginInfoEntry("EditorExecutableName") + if not editorName: + editorName = projectFile.replace('.uproject','Editor') + + bForceClean = deadlinePlugin.GetPluginInfoEntryWithDefault("ForceClean", "false").lower() == "true" + bForceFullSync = deadlinePlugin.GetPluginInfoEntryWithDefault("ForceFullSync", "false").lower() == "true" + bSyncProject = deadlinePlugin.GetPluginInfoEntryWithDefault("SyncProject", "true" ).lower() == "true" + bSyncEntireStream = deadlinePlugin.GetPluginInfoEntryWithDefault("SyncEntireStream", "false").lower() == "true" + bBuildProject = True + + # + # Set up PerforceUtil + # + + try: + env = os.environ.copy() + env["P4PORT"] = deadlinePlugin.GetProcessEnvironmentVariable("P4PORT") + env["P4USER"] = deadlinePlugin.GetProcessEnvironmentVariable("P4USER") + env["P4PASSWD"] = deadlinePlugin.GetProcessEnvironmentVariable("P4PASSWD") + print(f"env::{env}") + perforceTools = UnrealSyncUtil.PerforceUtils(stream, gamePath, env) + except UnrealSyncUtil.PerforceError as pe: + # Catch environment configuration errors. 
+ deadlinePlugin.FailRender(pe.message) + + + # Automatically determine a perforce workspace for this local machine + try: + deadlinePlugin.SetStatusMessage("Determining Workspace") + deadlinePlugin.LogInfo("Determining client workspace for %s on %s" % (stream, perforceTools.localHost)) + deadlinePlugin.SetProgress(0) + perforceTools.DetermineClientWorkspace() + except UnrealSyncUtil.PerforceArgumentError as argError: + deadlinePlugin.LogWarning(argError.message) + deadlinePlugin.FailRender(argError.message) + except UnrealSyncUtil.PerforceMissingWorkspaceError as argError: + deadlinePlugin.LogWarning(argError.message) + deadlinePlugin.FailRender(argError.message) + except UnrealSyncUtil.PerforceMultipleWorkspaceError as argError: + deadlinePlugin.LogWarning(argError.message) + deadlinePlugin.FailRender(argError.message) + + # Set project root + # This resolves gamePath in case it contains "..."" + try: + deadlinePlugin.SetStatusMessage("Determining project root") + deadlinePlugin.LogInfo("Determining project root for %s" % (projectFile)) + deadlinePlugin.SetProgress(0) + perforceTools.DetermineProjectRoot( projectFile ) + except UnrealSyncUtil.PerforceError as argError: + deadlinePlugin.LogWarning(argError.message) + deadlinePlugin.FailRender(argError.message) + + projectRoot = perforceTools.projectRoot.replace('\\','/') + deadlinePlugin.LogInfo( "Storing UnrealProjectRoot (\"%s\") in environment variable..." % projectRoot ) + deadlinePlugin.SetProcessEnvironmentVariable( "UnrealProjectRoot", projectRoot ) + + project_path = os.path.join(projectRoot, projectFile) + deadlinePlugin.LogInfo( "Storing UnrealUProject (\"%s\") in environment variable..." 
% project_path ) + deadlinePlugin.SetProcessEnvironmentVariable( "UnrealUProject", project_path ) + + + # Set the option if it's syncing entire stream or just game path + perforceTools.SetSyncEntireStream( bSyncEntireStream ) + + # + # Clean workspace + # + if bForceFullSync: + deadlinePlugin.LogWarning("A full perforce sync is queued, this will take some time.") + elif bForceClean: + # We don't bother doing a clean if they're doing a force full sync. + deadlinePlugin.LogInfo("Performing a perforce clean to bring local files in sync with depot.") + perforceTools.CleanWorkspace() + deadlinePlugin.LogInfo("Finished p4 clean.") + + deadlinePlugin.LogInfo("Perforce Command Prefix: " + " ".join(perforceTools.GetP4CommandPrefix())) + + # Determine the latest changelist to sync to if unspecified. + try: + if changelist == 0: + deadlinePlugin.LogInfo("No changelist specified, determining latest...") + perforceTools.DetermineLatestChangelist() + deadlinePlugin.LogInfo("Determined %d as latest." % perforceTools.changelist) + else: + deadlinePlugin.LogInfo("Syncing to manually specified CL %d." % changelist) + perforceTools.setChangelist(changelist) + except UnrealSyncUtil.PerforceResponseError as argError: + deadlinePlugin.LogWarning(str(argError)) + deadlinePlugin.LogWarning("Changelist will be latest in subsequent commands.") + + + # + # Sync project + # + if bSyncProject: + + # Estimate how much work there is to do for a sync operation. + try: + deadlinePlugin.SetStatusMessage("Estimating work for Project sync (CL %d)" % perforceTools.changelist) + deadlinePlugin.LogInfo("Estimating work for Project sync (CL %d)" % perforceTools.changelist) + perforceTools.DetermineSyncWorkEstimate(bForceFullSync) + except UnrealSyncUtil.PerforceResponseError as argError: + deadlinePlugin.LogWarning(str(argError)) + deadlinePlugin.LogWarning("No sync estimates will be available.") + + # If there's no files to sync, let's skip running the sync. 
It takes a lot of time as it's a double-estimate. + if perforceTools.syncEstimates[0] == 0 and perforceTools.syncEstimates[1] == 0 and perforceTools.syncEstimates[2] == 0: + deadlinePlugin.LogInfo("Skipping sync command as estimated says there's no work to sync!") + else: + # Sync to the changelist already calculated. + try: + deadlinePlugin.SetStatusMessage("Syncing to CL %d" % perforceTools.changelist) + deadlinePlugin.LogInfo("Syncing to CL %d" % perforceTools.changelist) + deadlinePlugin.SetProgress(0) + deadlinePlugin.LogInfo("Estimated Files %s (added/updated/deleted)" % ("/".join(map(str, perforceTools.syncEstimates)))) + + logCallback = lambda tools: deadlinePlugin.SetProgress(perforceTools.GetSyncProgress() * 100) + + # Perform the sync. This could take a while. + perforceTools.Sync(logCallback, bForceFullSync) + + # The estimates are only estimates, so when the command is complete we'll ensure it looks complete. + deadlinePlugin.SetStatusMessage("Synced Workspace to CL " + str(perforceTools.changelist)) + deadlinePlugin.LogInfo("Synced Workspace to CL " + str(perforceTools.changelist)) + deadlinePlugin.SetProgress(100) + except IOError as ioError: + deadlinePlugin.LogWarning(str(ioError)) + deadlinePlugin.FailRender("Suspected Out of Disk Error while syncing: \"%s\"" % str(ioError)) + else: + deadlinePlugin.LogInfo("Skipping Project Sync due to job settings.") + + + + # + # Build project + # + if bBuildProject: + # BuildUtils requires engine root to determine a path to UnrealBuildTool + # Using Deadline system to determine the path to the executable + version = deadlinePlugin.GetPluginInfoEntry("EngineVersion") + deadlinePlugin.LogInfo('Version defined: %s' % version ) + version_string = str(version).replace(".", "_") + executable_key = f"UnrealEditorExecutable_{version_string}" + unreal_exe_list = (deadlinePlugin.GetProcessEnvironmentVariable(executable_key) + or deadlinePlugin.GetProcessEnvironmentVariable("UnrealExecutable")) + if not unreal_exe_list: 
+ deadlinePlugin.FailRender( "Unreal Engine " + str(version) + " entry not found in .param file" ) + unreal_executable = FileUtils.SearchFileList( unreal_exe_list ) + if unreal_executable == "": + err_msg = 'Unreal Engine %s executable was not found in the semicolon separated list \"%s\".' % (str(version), str(unreal_exe_list)) + deadlinePlugin.FailRender( err_msg ) + + unreal_executable = unreal_executable.replace('\\','/') + engine_root = unreal_executable.split('/Engine/Binaries/')[0] + + uproject_path = perforceTools.uprojectPath + + buildtool = UnrealSyncUtil.BuildUtils( engine_root, uproject_path, editorName ) + + if not buildtool.IsCppProject(): + deadlinePlugin.LogInfo("Skip building process -- no need to build for BP project") + else: + deadlinePlugin.LogInfo("Starting a local build") + + try: + deadlinePlugin.LogInfo("Generating project files...") + deadlinePlugin.SetStatusMessage("Generating project files") + buildtool.GenerateProjectFiles() + except Exception as e: + deadlinePlugin.LogWarning("Caught exception while generating project files. " + str(e)) + deadlinePlugin.FailRender(str(e)) + + try: + deadlinePlugin.LogInfo("Building Engine...") + deadlinePlugin.SetStatusMessage("Building Engine") + buildtool.BuildBuildTargets() + except Exception as e: + deadlinePlugin.LogWarning("Caught exception while building engine. 
" + str(e)) + deadlinePlugin.FailRender(str(e)) + + + deadlinePlugin.LogInfo("Content successfully synced and engine up to date!") + deadlinePlugin.SetStatusMessage("Content Synced & Engine Up to Date") + deadlinePlugin.SetProgress(100) + diff --git a/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/LICENSE b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/LICENSE new file mode 100644 index 0000000000..57aadd5d68 --- /dev/null +++ b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2022 Epic Games + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/PluginPreLoad.py b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/PluginPreLoad.py new file mode 100644 index 0000000000..20dd93274e --- /dev/null +++ b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/PluginPreLoad.py @@ -0,0 +1,13 @@ +#!/usr/bin/env python3 +# Copyright Epic Games, Inc. All Rights Reserved + +import sys +from pathlib import Path + + +def __main__(): + # Add the location of the plugin package to the system path so the plugin + # can import supplemental modules if it needs to + plugin_path = Path(__file__) + if plugin_path.parent not in sys.path: + sys.path.append(plugin_path.parent.as_posix()) diff --git a/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEngine5.ico b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEngine5.ico new file mode 100644 index 0000000000..e89ce873ce Binary files /dev/null and b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEngine5.ico differ diff --git a/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEngine5.options b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEngine5.options new file mode 100644 index 0000000000..a125dd5b54 --- /dev/null +++ b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEngine5.options @@ -0,0 +1,59 @@ +[Executable] +Type=String +Label=Unreal Executable +Category=Executable +Index=0 +Description=Unreal Executable +Required=true +Default= +DisableIfBlank=false + +[LoggingDirectory] +Type=string +Label=Command Line Arguments +Category=Executable +Index=2 +Description=What command line arguments should be passed to the executable? +Required=false +DisableIfBlank=false + +[ProjectFile] +Type=String +Label=Project File +Category=Project +Index=0 +Description=The name of the .uproject file ("MyGame.uproject") within the Game folder. 
+Required=true +Default= +DisableIfBlank=false + +[CommandLineArguments] +Type=string +Label=Command Line Arguments +Category=Project +Index=1 +Description=What command line arguments should be passed to the executable? +Required=false +Default= +DisableIfBlank=false + +[CommandLineMode] +Type=boolean +Label=Command Line Mode +Category=Project +Index=1 +Description=Should the Editor process commands as commandline arguments +Required=false +DisableIfBlank=false +Default=true + +[StartupDirectory] +Type=string +Label=Startup Directory +Category=Command Line Options +CategoryOrder=0 +Index=3 +Description=The directory to start the command line in (leave blank to use default) +Required=false +Default= +DisableIfBlank=true \ No newline at end of file diff --git a/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEngine5.param b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEngine5.param new file mode 100644 index 0000000000..c65783d1dc --- /dev/null +++ b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEngine5.param @@ -0,0 +1,33 @@ +[About] +Type=label +Label=About +Category=About Plugin +CategoryOrder=-1 +Index=0 +Default=Unreal Engine 5 Plugin for Deadline +Description=Not configurable + +[ConcurrentTasks] +Type=label +Label=ConcurrentTasks +Category=About Plugin +CategoryOrder=-1 +Index=0 +Default=True +Description=Not configurable + +[Executable] +Type=String +Label=Executable to test job submission +Category=Options +Index=1 +Default=C:\Windows\System32\cmd.exe +Description=Unreal Executable to process the job + +[RPCWaitTime] +Type=Integer +Label=RPC Process Wait time +Category=Options +Index=2 +Default=300 +Description=The amount of seconds the RPC process should wait for a connection from Unreal diff --git a/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEngine5.py b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEngine5.py new file mode 100644 index 
0000000000..631da4df39 --- /dev/null +++ b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEngine5.py @@ -0,0 +1,806 @@ +#!/usr/bin/env python3 +# Copyright Epic Games, Inc. All Rights Reserved + +import os +import time +import sys +from datetime import datetime +from pathlib import Path + +from Deadline.Plugins import DeadlinePlugin, PluginType +from FranticX.Processes import ManagedProcess +from Deadline.Scripting import RepositoryUtils, FileUtils, StringUtils + +from DeadlineRPC import ( + DeadlineRPCServerManager, + DeadlineRPCServerThread, + BaseDeadlineRPCJobManager +) + + +def GetDeadlinePlugin(): + """ + Deadline calls this function to get am instance of our class + """ + return UnrealEnginePlugin() + + +def CleanupDeadlinePlugin(deadline_plugin): + """ + Deadline call this function to run any cleanup code + :param deadline_plugin: An instance of the deadline plugin + """ + deadline_plugin.clean_up() + + +class UnrealEnginePlugin(DeadlinePlugin): + """ + Deadline plugin to execute an Unreal Engine job. + NB: This plugin makes no assumptions about what the render job is but has a + few expectations. This plugin runs as a server in the deadline process + and exposes a few Deadline functionalities over XML RPC. The managed process + used by this plugin waits for a client to connect and continuously polls the + RPC server till a task has been marked complete before exiting the + process. This behavior however has a drawback. If for some reason your + process does not mark a task complete after working on a command, + the plugin will run the current task indefinitely until specified to + end by the repository settings or manually. 
+ """ + + def __init__(self): + """ + Constructor + """ + if sys.version_info.major == 3: + super().__init__() + self.InitializeProcessCallback += self._on_initialize_process + self.StartJobCallback += self._on_start_job + self.RenderTasksCallback += self._on_render_tasks + self.EndJobCallback += self._on_end_job + self.MonitoredManagedProcessExitCallback += self._on_process_exit + + # Set the name of the managed process to the current deadline process ID + self._unreal_process_name = f"UnrealEngine_{os.getpid()}" + self.unreal_managed_process = None + + # Keep track of the RPC manager + self._deadline_rpc_manager = None + + # Keep track of when Job Ended has been called + self._job_ended = False + + # set the plugin to commandline mode by default. This will launch the + # editor and wait for the process to exit. There is no communication + # with the deadline process. + self._commandline_mode = True + + def clean_up(self): + """ + Plugin cleanup + """ + del self.InitializeProcessCallback + del self.StartJobCallback + del self.RenderTasksCallback + del self.EndJobCallback + + if self.unreal_managed_process: + self.unreal_managed_process.clean_up() + del self.unreal_managed_process + + del self.MonitoredManagedProcessExitCallback + + def _on_initialize_process(self): + """ + Initialize the plugin + """ + self.LogInfo("Initializing job plugin") + self.SingleFramesOnly = False + self.StdoutHandling = True + self.PluginType = PluginType.Advanced + self._commandline_mode = StringUtils.ParseBoolean( + self.GetPluginInfoEntryWithDefault("CommandLineMode", "true") + ) + + if self._commandline_mode: + self.AddStdoutHandlerCallback( + ".*Progress: (\d+)%.*" + ).HandleCallback += self._handle_progress + self.AddStdoutHandlerCallback( + ".*" + ).HandleCallback += self._handle_stdout + + self.LogInfo("Initialization complete!") + + def _on_start_job(self): + """ + This is executed when the plugin picks up a job + """ + + # Skip if we are in commandline mode + if 
self._commandline_mode: + return + + self.LogInfo("Executing Start Job") + + # Get and set up the RPC manager for the plugin + self._deadline_rpc_manager = self._setup_rpc_manager() + + # Get a managed process + self.unreal_managed_process = UnrealEngineManagedProcess( + self._unreal_process_name, self, self._deadline_rpc_manager + ) + self.LogInfo("Done executing Start Job") + + def _setup_rpc_manager(self): + """ + Get an RPC manager for the plugin. + """ + self.LogInfo("Setting up RPC Manager") + # Setting the port to `0` will get a random available port for the + # processes to connect on. This will help avoid TIME_WAIT + # issues with the client if the job has to be re-queued + port = 0 + + # Get an instance of the deadline rpc manager class. This class will + # store an instance of this plugin in the python globals. This should + # allow threads in the process to get an instance of the plugin without + # passing the data down through the thread instance + _deadline_rpc_manager = DeadlineRPCServerManager(self, port) + + # We would like to run the server in a thread to not block deadline's + # process. Get the Deadline RPC thread class. Set the class that is + # going to be registered on the server on the thread class + DeadlineRPCServerThread.deadline_job_manager = BaseDeadlineRPCJobManager + + # Set the threading class on the deadline manager + _deadline_rpc_manager.threaded_server_class = DeadlineRPCServerThread + + return _deadline_rpc_manager + + def _on_render_tasks(self): + """ + Execute the render task + """ + # This starts a self-managed process that terminates based on the exit + # code of the process. 
0 means success + if self._commandline_mode: + startup_dir = self._get_startup_directory() + + self.unreal_managed_process = UnrealEngineCmdManagedProcess( + self, self._unreal_process_name, startup_dir=startup_dir + ) + + # Auto execute the managed process + self.RunManagedProcess(self.unreal_managed_process) + exit_code = self.unreal_managed_process.ExitCode # type: ignore + + self.LogInfo(f"Process returned: {exit_code}") + + if exit_code != 0: + self.FailRender( + f"Process returned non-zero exit code '{exit_code}'" + ) + + else: + # Flush stdout. This is useful after executing the first task + self.FlushMonitoredManagedProcessStdout(self._unreal_process_name) + + # Start next tasks + self.LogWarning(f"Starting Task {self.GetCurrentTaskId()}") + + # Account for any re-queued jobs. Deadline will immediately execute + # render tasks if a job has been re-queued on the same process. If + # that happens get a new instance of the rpc manager + if not self._deadline_rpc_manager or self._job_ended: + self._deadline_rpc_manager = self._setup_rpc_manager() + + if not self._deadline_rpc_manager.is_started: + + # Start the manager + self._deadline_rpc_manager.start(threaded=True) + + # Get the socket the server is using and expose it to the + # process + server = self._deadline_rpc_manager.get_server() + + _, server_port = server.socket.getsockname() + + self.LogWarning( + f"Starting Deadline RPC Manager on port `{server_port}`" + ) + + # Get the port the server socket is going to use and + # allow other systems to get the port to the rpc server from the + # process environment variables + self.SetProcessEnvironmentVariable( + "DEADLINE_RPC_PORT", str(server_port) + ) + + # Fail if we don't have an instance to a managed process. 
+ # This should typically return true + if not self.unreal_managed_process: + self.FailRender("There is no unreal process Running") + + if not self.MonitoredManagedProcessIsRunning(self._unreal_process_name): + # Start the monitored Process + self.StartMonitoredManagedProcess( + self._unreal_process_name, + self.unreal_managed_process + ) + + self.VerifyMonitoredManagedProcess(self._unreal_process_name) + + # Execute the render task + self.unreal_managed_process.render_task() + + self.LogWarning(f"Finished Task {self.GetCurrentTaskId()}") + self.FlushMonitoredManagedProcessStdout(self._unreal_process_name) + + def _on_end_job(self): + """ + Called when the job ends + """ + if self._commandline_mode: + return + + self.FlushMonitoredManagedProcessStdout(self._unreal_process_name) + self.LogWarning("EndJob called") + self.ShutdownMonitoredManagedProcess(self._unreal_process_name) + + # Gracefully shutdown the RPC manager. This will also shut down any + # threads spun up by the manager + if self._deadline_rpc_manager: + self._deadline_rpc_manager.shutdown() + + # Mark the job as ended. 
This also helps us to know when a job has + # been re-queued, so we can get a new instance of the RPC manager, + # as Deadline calls End Job when an error occurs + self._job_ended = True + + def _on_process_exit(self): + # If the process ends unexpectedly, make sure we shut down the manager + # gracefully + if self._commandline_mode: + return + + if self._deadline_rpc_manager: + self._deadline_rpc_manager.shutdown() + + def _handle_stdout(self): + """ + Handle stdout + """ + self.LogInfo(self.GetRegexMatch(0)) + + def _handle_progress(self): + """ + Handles any progress reports + :return: + """ + progress = float(self.GetRegexMatch(1)) + self.SetProgress(progress) + + def _get_startup_directory(self): + """ + Get startup directory + """ + startup_dir = self.GetPluginInfoEntryWithDefault( + "StartupDirectory", "" + ).strip() + # Get the project root path + project_root = self.GetProcessEnvironmentVariable("ProjectRoot") + + if startup_dir: + if project_root: + startup_dir = startup_dir.format(ProjectRoot=project_root) + + self.LogInfo("Startup Directory: {dir}".format(dir=startup_dir)) + return startup_dir.replace("\\", "/") + + +class UnrealEngineManagedProcess(ManagedProcess): + """ + Process for executing and managing unreal jobs. + + .. note:: + + Although this process can auto start a batch process by + executing a script on startup, it is VERY important the command + that is executed on startup makes a connection to the Deadline RPC + server. + This will allow Deadline to know a task is running and will wait + until the task is complete before rendering the next one. If this + is not done, Deadline will assume something went wrong with the + process and fail the job after a few minutes. It is also VERY + critical the Deadline process is told when a task is complete, so + it can move on to the next one. See the Deadline RPC manager on how + this communication system works. 
+ The reason for this complexity is, sometimes an unreal project can + take several minutes to load, and we only want to bare the cost of + that load time once between tasks. + + """ + + def __init__(self, process_name, deadline_plugin, deadline_rpc_manager): + """ + Constructor + :param process_name: The name of this process + :param deadline_plugin: An instance of the plugin + :param deadline_rpc_manager: An instance of the rpc manager + """ + if sys.version_info.major == 3: + super().__init__() + self.InitializeProcessCallback += self._initialize_process + self.RenderExecutableCallback += self._render_executable + self.RenderArgumentCallback += self._render_argument + self._deadline_plugin = deadline_plugin + self._deadline_rpc_manager = deadline_rpc_manager + self._temp_rpc_client = None + self._name = process_name + self._executable_path = None + + # Elapsed time to check for connection + self._process_wait_time = int(self._deadline_plugin.GetConfigEntryWithDefault("RPCWaitTime", "300")) + + def clean_up(self): + """ + Called when the plugin cleanup is called + """ + self._deadline_plugin.LogInfo("Executing managed process cleanup.") + # Clean up stdout handler callbacks. + for stdoutHandler in self.StdoutHandlers: + del stdoutHandler.HandleCallback + + del self.InitializeProcessCallback + del self.RenderExecutableCallback + del self.RenderArgumentCallback + self._deadline_plugin.LogInfo("Managed Process Cleanup Finished.") + + def _initialize_process(self): + """ + Called by Deadline to initialize the process. + """ + self._deadline_plugin.LogInfo( + "Executing managed process Initialize Process." + ) + + # Set the ManagedProcess specific settings. + self.PopupHandling = False + self.StdoutHandling = True + + # Set the stdout handlers. 
+ + self.AddStdoutHandlerCallback( + "LogPython: Error:.*" + ).HandleCallback += self._handle_stdout_error + self.AddStdoutHandlerCallback( + "Warning:.*" + ).HandleCallback += self._handle_stdout_warning + + logs_dir = self._deadline_plugin.GetPluginInfoEntryWithDefault( + "LoggingDirectory", "" + ) + + if logs_dir: + + job = self._deadline_plugin.GetJob() + + log_file_dir = os.path.join( + job.JobName, + f"{job.JobSubmitDateTime.ToUniversalTime()}".replace(" ", "-"), + ) + + if not os.path.exists(log_file_dir): + os.makedirs(log_file_dir) + + # If a log directory is specified, this may redirect stdout to the + # log file instead. This is a builtin Deadline behavior + self.RedirectStdoutToFile( + os.path.join( + log_file_dir, + f"{self._deadline_plugin.GetSlaveName()}_{datetime.now()}.log".replace(" ", "-") + ) + ) + + def _handle_std_out(self): + self._deadline_plugin.LogInfo(self.GetRegexMatch(0)) + + # Callback for when a line of stdout contains a WARNING message. + def _handle_stdout_warning(self): + self._deadline_plugin.LogWarning(self.GetRegexMatch(0)) + + # Callback for when a line of stdout contains an ERROR message. + def _handle_stdout_error(self): + self._deadline_plugin.FailRender(self.GetRegexMatch(0)) + + def render_task(self): + """ + Render a task + """ + + # Fail the render is we do not have a manager running + if not self._deadline_rpc_manager: + self._deadline_plugin.FailRender("No rpc manager was running!") + + # Start a timer to monitor the process time + start_time = time.time() + + # Get temp client connection + if not self._temp_rpc_client: + self._temp_rpc_client = self._deadline_rpc_manager.get_temporary_client_proxy() + + + print("Is server and client connected?", self._temp_rpc_client.is_connected()) + + # Make sure we have a manager running, and we can establish a connection + if not self._temp_rpc_client.is_connected(): + # Wait for a connection. This polls the server thread till an + # unreal process client has connected. 
It is very important that + # a connection is established by the client to allow this process + # to execute. + while round(time.time() - start_time) <= self._process_wait_time: + try: + # keep checking to see if a client has connected + if self._temp_rpc_client.is_connected(): + self._deadline_plugin.LogInfo( + "Client connection established!!" + ) + break + except Exception: + pass + + self._deadline_plugin.LogInfo("Waiting on client connection..") + self._deadline_plugin.FlushMonitoredManagedProcessStdout( + self._name + ) + time.sleep(2) + else: + + # Fail the render after waiting too long + self._deadline_plugin.FailRender( + "A connection was not established with an unreal process" + ) + + # if we are connected, wait till the process task is marked as + # complete. + while not self._temp_rpc_client.is_task_complete( + self._deadline_plugin.GetCurrentTaskId() + ): + # Keep flushing stdout + self._deadline_plugin.FlushMonitoredManagedProcessStdout(self._name) + + # Flush one last time + self._deadline_plugin.FlushMonitoredManagedProcessStdout(self._name) + + def _render_executable(self): + """ + Get the render executable + """ + self._deadline_plugin.LogInfo("Setting up Render Executable") + + executable = self._deadline_plugin.GetEnvironmentVariable("UnrealExecutable") + + if not executable: + executable = self._deadline_plugin.GetPluginInfoEntry("Executable") + + # Resolve any path mappings required + executable = RepositoryUtils.CheckPathMapping(executable) + + project_root = self._deadline_plugin.GetEnvironmentVariable("ProjectRoot") + + # If a project root is specified in the environment, it is assumed a + # previous process resolves the root location of the executable and + # presents it in the environment. 
+ if project_root: + # Resolve any `{ProjectRoot}` tokens present in the executable path + executable = executable.format(ProjectRoot=project_root) + + # Make sure the executable exists + if not FileUtils.FileExists(executable): + self._deadline_plugin.FailRender(f"Could not find `{executable}`") + + self._executable_path = executable.replace("\\", "/") + + self._deadline_plugin.LogInfo(f"Found executable `{executable}`") + + return self._executable_path + + def _render_argument(self): + """ + Get the arguments to startup unreal + """ + self._deadline_plugin.LogInfo("Setting up Render Arguments") + + # Look for any unreal uproject paths in the process environment. This + # assumes a previous process resolves a uproject path and makes it + # available. + uproject = self._deadline_plugin.GetEnvironmentVariable("UnrealUProject") + + if not uproject: + uproject = self._deadline_plugin.GetPluginInfoEntry("ProjectFile") + # Get any path mappings required. 
Expects this to be a full path + uproject = RepositoryUtils.CheckPathMapping(uproject) + + # Get the project root path + project_root = self._deadline_plugin.GetEnvironmentVariable("ProjectRoot") + + # Resolve any `{ProjectRoot}` tokens in the environment + if project_root: + uproject = uproject.format(ProjectRoot=project_root) + + uproject = Path(uproject.replace("\\", "/")) + self._deadline_plugin.LogInfo(f"Uproject: `{uproject}`") + # Check to see if the Uproject is a relative path + if str(uproject).replace("\\", "/").startswith("../"): + + if not self._executable_path: + self._deadline_plugin.FailRender("Could not find executable path to resolve relative path.") + + # Find executable root + import re + engine_dir = re.findall("([\s\S]*.Engine)", self._executable_path) + if not engine_dir: + self._deadline_plugin.FailRender("Could not find executable Engine directory.") + + executable_root = Path(engine_dir[0]).parent + + # Resolve editor relative paths + found_paths = sorted(executable_root.rglob(str(uproject).replace("\\", "/").strip("../"))) + + if not found_paths or len(found_paths) > 1: + self._deadline_plugin.FailRender( + f"Expected exactly one uproject relative to the root directory but found {len(found_paths)}. There should only be one when a relative path is defined." + ) + + uproject = found_paths[0] + + # make sure the project exists + if not FileUtils.FileExists(uproject.as_posix()): + self._deadline_plugin.FailRender(f"Could not find `{uproject.as_posix()}`") + cmd_args = self._deadline_plugin.GetPluginInfoEntryWithDefault("CommandLineArguments", "") + # Set up the arguments to startup unreal. 
+ job_command_args = [ + '"{u_project}"'.format(u_project=uproject.as_posix()), + cmd_args, + # Force "-log" otherwise there is no output from the executable + "-log", + "-unattended", + "-stdout", + "-allowstdoutlogverbosity", + ] + + arguments = " ".join(job_command_args) + self._deadline_plugin.LogInfo(f"Startup Arguments: `{arguments}`") + + return arguments + + +class UnrealEngineCmdManagedProcess(ManagedProcess): + """ + Process for executing unreal over commandline + """ + + def __init__(self, deadline_plugin, process_name, startup_dir=""): + """ + Constructor + :param process_name: The name of this process + """ + if sys.version_info.major == 3: + super().__init__() + self._deadline_plugin = deadline_plugin + self._name = process_name + self.ExitCode = -1 + self._startup_dir = startup_dir + self._executable_path = None + + self.InitializeProcessCallback += self._initialize_process + self.RenderExecutableCallback += self._render_executable + self.RenderArgumentCallback += self._render_argument + self.CheckExitCodeCallback += self._check_exit_code + self.StartupDirectoryCallback += self._startup_directory + + def clean_up(self): + """ + Called when the plugin cleanup is called + """ + self._deadline_plugin.LogInfo("Executing managed process cleanup.") + # Clean up stdout handler callbacks. + for stdoutHandler in self.StdoutHandlers: + del stdoutHandler.HandleCallback + + del self.InitializeProcessCallback + del self.RenderExecutableCallback + del self.RenderArgumentCallback + del self.CheckExitCodeCallback + del self.StartupDirectoryCallback + self._deadline_plugin.LogInfo("Managed Process Cleanup Finished.") + + def _initialize_process(self): + """ + Called by Deadline to initialize the process. + """ + self._deadline_plugin.LogInfo( + "Executing managed process Initialize Process." + ) + + # Set the ManagedProcess specific settings. 
+ self.PopupHandling = True + self.StdoutHandling = True + self.HideDosWindow = True + + # Ensure child processes are killed and the parent process is + # terminated on exit + self.UseProcessTree = True + self.TerminateOnExit = True + + shell = self._deadline_plugin.GetPluginInfoEntryWithDefault("Shell", "") + + if shell: + self._shell = shell + + self.AddStdoutHandlerCallback( + ".*Progress: (\d+)%.*" + ).HandleCallback += self._handle_progress + + # self.AddStdoutHandlerCallback("LogPython: Error:.*").HandleCallback += self._handle_stdout_error + + # Get the current frames for the task + current_task_frames = self._deadline_plugin.GetCurrentTask().TaskFrameString + + # Set the frames sting as an environment variable + self.SetEnvironmentVariable("CURRENT_RENDER_FRAMES", current_task_frames) + + def _handle_stdout_error(self): + """ + Callback for when a line of stdout contains an ERROR message. + """ + self._deadline_plugin.FailRender(self.GetRegexMatch(0)) + + def _check_exit_code(self, exit_code): + """ + Returns the process exit code + :param exit_code: + :return: + """ + self.ExitCode = exit_code + + def _startup_directory(self): + """ + Startup directory + """ + return self._startup_dir + + def _handle_progress(self): + """ + Handles progress reports + """ + progress = float(self.GetRegexMatch(1)) + self._deadline_plugin.SetProgress(progress) + + def _render_executable(self): + """ + Get the render executable + """ + + self._deadline_plugin.LogInfo("Setting up Render Executable") + + executable = self._deadline_plugin.GetEnvironmentVariable("UnrealExecutable") + + if not executable: + executable = self._deadline_plugin.GetPluginInfoEntry("Executable") + + # Get the executable from the plugin + executable = RepositoryUtils.CheckPathMapping(executable) + # Get the project root path + project_root = self._deadline_plugin.GetProcessEnvironmentVariable( + "ProjectRoot" + ) + + # Resolve any `{ProjectRoot}` tokens in the environment + if project_root: + executable 
= executable.format(ProjectRoot=project_root) + + if not FileUtils.FileExists(executable): + self._deadline_plugin.FailRender( + "{executable} could not be found".format(executable=executable) + ) + + # TODO: Setup getting executable from the config as well + + self._deadline_plugin.LogInfo( + "Render Executable: {exe}".format(exe=executable) + ) + self._executable_path = executable.replace("\\", "/") + + return self._executable_path + + def _render_argument(self): + """ + Get the arguments to startup unreal + :return: + """ + self._deadline_plugin.LogInfo("Setting up Render Arguments") + + # Look for any unreal uproject paths in the process environment. This + # assumes a previous process resolves a uproject path and makes it + # available. + project_file = self._deadline_plugin.GetEnvironmentVariable("UnrealUProject") + + if not project_file: + project_file = self._deadline_plugin.GetPluginInfoEntry("ProjectFile") + + # Get any path mappings required. Expects this to be a full path + project_file = RepositoryUtils.CheckPathMapping(project_file) + + # Get the project root path + project_root = self._deadline_plugin.GetProcessEnvironmentVariable( + "ProjectRoot" + ) + + # Resolve any `{ProjectRoot}` tokens in the environment + if project_root: + project_file = project_file.format(ProjectRoot=project_root) + + if not project_file: + self._deadline_plugin.FailRender( + f"Expected project file but found `{project_file}`" + ) + + project_file = Path(project_file.replace("\u201c", '"').replace( + "\u201d", '"' + ).replace("\\", "/")) + + # Check to see if the Uproject is a relative path + if str(project_file).replace("\\", "/").startswith("../"): + + if not self._executable_path: + self._deadline_plugin.FailRender("Could not find executable path to resolve relative path.") + + # Find executable root + import re + engine_dir = re.findall("([\s\S]*.Engine)", self._executable_path) + if not engine_dir: + self._deadline_plugin.FailRender("Could not find executable Engine 
directory.") + + executable_root = Path(engine_dir[0]).parent + + # Resolve editor relative paths + found_paths = sorted(executable_root.rglob(str(project_file).replace("\\", "/").strip("../"))) + + if not found_paths or len(found_paths) > 1: + self._deadline_plugin.FailRender( + f"Found multiple uprojects relative to the root directory. There should only be one when a relative path is defined." + ) + + project_file = found_paths[0] + self._deadline_plugin.LogInfo(f"project_file:: `{project_file}`") + # make sure the project exists + if not FileUtils.FileExists(project_file.as_posix()): + self._deadline_plugin.FailRender(f"Could not find `{project_file.as_posix()}`") + + # Get the render arguments + args = RepositoryUtils.CheckPathMapping( + self._deadline_plugin.GetPluginInfoEntry( + "CommandLineArguments" + ).strip() + ) + + args = args.replace("\u201c", '"').replace("\u201d", '"') + + startup_args = " ".join( + [ + '"{u_project}"'.format(u_project=project_file.as_posix()), + args, + "-log", + "-unattended", + "-stdout", + "-allowstdoutlogverbosity", + ] + ) + + self._deadline_plugin.LogInfo( + "Render Arguments: {args}".format(args=startup_args) + ) + + return startup_args diff --git a/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/MoviePipelineDeadline/Config/BaseMoviePipelineDeadline.ini b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/MoviePipelineDeadline/Config/BaseMoviePipelineDeadline.ini new file mode 100644 index 0000000000..cde689a963 --- /dev/null +++ b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/MoviePipelineDeadline/Config/BaseMoviePipelineDeadline.ini @@ -0,0 +1,12 @@ +[CoreRedirects] ++PackageRedirects=(OldName="/Script/MoviePipelineDeadlineIntegration", NewName="/Script/MoviePipelineIntegration", MatchSubstring=true) ++PackageRedirects=(OldName="/Script/MoviePipelineIntegration", NewName="/Script/MoviePipelineDeadline", MatchSubstring=true) 
++ClassRedirects=(OldName="/Script/MoviePipelineDeadlineIntegration.MoviePipelineDeadlineJob", NewName="/Script/MoviePipelineIntegration.DeadlineMoviePipelineExecutorJob") ++ClassRedirects=(OldName="/Script/MoviePipelineIntegration.DeadlineMoviePipelineExecutorJob", NewName="/Script/MoviePipelineDeadline.MoviePipelineDeadlineExecutorJob") ++ClassRedirects=(OldName="/Script/MoviePipelineIntegration.DeadlineMoviePipelineIntegrationSettings", NewName="/Script/MoviePipelineDeadline.MoviePipelineDeadlineSettings") ++ClassRedirects=(OldName="/Engine/PythonTypes.MoviePipelineEditorDeadlineExecutor", NewName="/Engine/PythonTypes.MoviePipelineDeadlineEditorRenderExecutor") ++FunctionRedirects=(OldName="/Script/MoviePipelineDeadline.MoviePipelineDeadlineExecutorJob.GetDeadlineJobInfoStructWithOverridesIfApplicable",NewName="/Script/MoviePipelineDeadline.MoviePipelineDeadlineExecutorJob.GetDeadlineJobPresetStructWithOverridesIfApplicable") ++PropertyRedirects=(OldName="/Script/MoviePipelineDeadline.MoviePipelineDeadlineExecutorJob.PresetLibrary",NewName="/Script/MoviePipelineDeadline.MoviePipelineDeadlineExecutorJob.JobPreset") ++PropertyRedirects=(OldName="/Script/MoviePipelineDeadline.MoviePipelineDeadlineSettings.DefaultPresetLibrary",NewName="/Script/MoviePipelineDeadline.MoviePipelineDeadlineSettings.DefaultJobPreset") ++PropertyRedirects=(OldName="/Script/MoviePipelineDeadline.MoviePipelineDeadlineSettings.JobInfoPropertiesToHideInMovieRenderQueue",NewName="/Script/MoviePipelineDeadline.MoviePipelineDeadlineSettings.JobPresetPropertiesToHideInMovieRenderQueue") ++FunctionRedirects=(OldName="/Script/MoviePipelineDeadline.MoviePipelineDeadlineExecutorJob.GetDeadlineJobPresetStructWithOverridesIfApplicable",NewName="/Script/MoviePipelineDeadline.MoviePipelineDeadlineExecutorJob.GetDeadlineJobPresetStructWithOverrides") diff --git a/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/MoviePipelineDeadline/Config/FilterPlugin.ini 
b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/MoviePipelineDeadline/Config/FilterPlugin.ini new file mode 100644 index 0000000000..ccebca2f32 --- /dev/null +++ b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/MoviePipelineDeadline/Config/FilterPlugin.ini @@ -0,0 +1,8 @@ +[FilterPlugin] +; This section lists additional files which will be packaged along with your plugin. Paths should be listed relative to the root plugin directory, and +; may include "...", "*", and "?" wildcards to match directories, files, and individual characters respectively. +; +; Examples: +; /README.txt +; /Extras/... +; /Binaries/ThirdParty/*.dll diff --git a/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/MoviePipelineDeadline/Content/Python/init_unreal.py b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/MoviePipelineDeadline/Content/Python/init_unreal.py new file mode 100644 index 0000000000..e78c82381f --- /dev/null +++ b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/MoviePipelineDeadline/Content/Python/init_unreal.py @@ -0,0 +1,42 @@ +# Copyright Epic Games, Inc. All Rights Reserved + +# Built-in +import sys +from pathlib import Path + +# Third-party +import unreal + +import remote_executor +import mrq_cli + +plugin_name = "MoviePipelineDeadline" + + +# Add the actions path to sys path +actions_path = Path(__file__).parent.joinpath("pipeline_actions").as_posix().lower() + +if actions_path not in sys.path: + sys.path.append(actions_path) + +from pipeline_actions import render_queue_action + +# Register the menu from the render queue actions +render_queue_action.register_menu_action() + +# The asset registry may not be fully loaded by the time this is called, +# warn the user that attempts to look assets up may fail +# unexpectedly. +# Look for a custom commandline start key `-waitonassetregistry`. 
# ---- tail of the plugin bootstrap script (its start is above this chunk) ----
# This key is used to trigger a synchronous wait on the asset registry to
# complete. This is useful in commandline states where you explicitly want
# all assets loaded before continuing.
# NOTE(review): `unreal` and `plugin_name` are bound earlier in that script.
asset_registry = unreal.AssetRegistryHelpers.get_asset_registry()
if asset_registry.is_loading_assets() and ("-waitonassetregistry" in unreal.SystemLibrary.get_command_line().split()):
    unreal.log_warning(
        f"Asset Registry is still loading. The {plugin_name} plugin will "
        f"be loaded after the Asset Registry is complete."
    )

    asset_registry.wait_for_completion()
    unreal.log(f"Asset Registry is complete. Loading {plugin_name} plugin.")


# ==== new file: MoviePipelineDeadline/Content/Python/mrq_cli.py ====
# Copyright Epic Games, Inc. All Rights Reserved

"""Commandline interface for executing local and remote MRQ renders.

Sub-commands (each also accepts the global options below):

    manifest <path> [--load]
        Build the render queue from a manifest (.utxt) file and render it.
        ``--load`` only loads the manifest without rendering.

    sequence <level_sequence> <map> <mrq_preset>
        Render a specific level sequence with a map and an MRQ preset.

    queue <queue_asset> [--load] [--jobs ...]
        Load (and by default render) a movie pipeline queue asset.

    render [--jobs ...]
        Render the currently loaded queue in its current state.

Global options:
    --shots / --all-shots    limit, or enable, shots on the queued jobs
    --user                   author for the render jobs (defaults to login user)
    --remote / -r            submit through the remote executor (e.g. Deadline);
                             requires --batch_name and --deadline_job_preset
    --output_override / --filename_override
                             override the queue's output directory / filename
    --cmdline                shut the editor down when the render finishes or
                             fails. REQUIRED when launched via ``-execcmds``,
                             because the default python commandlet would
                             otherwise close the editor as soon as the script
                             returns, while rendering is still in flight.

Example (editor commandline):
    UnrealEditor.exe project.uproject -execcmds="py mrq_cli.py sequence seq map preset --cmdline"
"""

import argparse

from mrq_cli_modes import render_manifest, render_sequence, render_queue, render_queue_jobs


if __name__ == "__main__":

    # A parser holding every argument we want available on ALL sub-commands;
    # it is attached to each subparser through `parents=[...]`.
    global_parser = argparse.ArgumentParser(
        description="This parser contains any global arguments we would want available on subparsers",
        add_help=False
    )
    # Determine if the editor was run from a commandline.
    global_parser.add_argument(
        "--cmdline",
        action="store_true",
        help="Flag for noting execution from commandline. "
             "This will shut the editor down after a render is complete or failed."
    )

    global_parser.add_argument(
        "-u",
        "--user",
        type=str,
        help="The user the render job will be submitted as."
    )

    # Group the flags for remote rendering. This is a conceptual group
    # (shown in the help output), not a logical one.
    remote_group = global_parser.add_argument_group("remote")

    # If the remote flag is present, the render is submitted remotely.
    remote_group.add_argument(
        "-r",
        "--remote",
        action="store_true",
        help="Determines if the render should be executed remotely."
    )

    # Option flags for remote renders; required alongside --remote when the
    # remote executor is Deadline.
    remote_group.add_argument(
        "--batch_name",
        type=str,
        help="The batch render name for the current remote render job."
    )

    remote_group.add_argument(
        "--deadline_job_preset",
        help="The remote job preset to use when rendering the current job."
    )

    # Output override group for the jobs.
    output_group = global_parser.add_argument_group("output")
    output_group.add_argument(
        "--output_override",
        type=str,
        help="Output folder override for the queue asset"
    )
    output_group.add_argument(
        "--filename_override",
        type=str,
        help="Filename override for the queue asset"
    )

    # Shot-selection group.
    shots_group = global_parser.add_argument_group("shots")
    shots_group.add_argument(
        "-s",
        "--shots",
        type=str,
        nargs="+",
        help="A list of shots to render in the level sequence. "
             "If no shots are provided, all shots in the level sequence will be rendered."
    )
    shots_group.add_argument(
        "--all-shots",
        action="store_true",
        help="Render all shots in the queue. This will enable all shots for all jobs."
    )

    # The main entry parser.
    parser = argparse.ArgumentParser(
        prog="PYMoviePipelineCLI",
        description="Commandline Interface for rendering MRQ jobs"
    )

    sub_commands = parser.add_subparsers(help="Sub-commands help")

    # Each mode module registers its own positional arguments and a `func`
    # default (via set_defaults) that executes the mode.
    manifest_parser = sub_commands.add_parser(
        "manifest",
        parents=[global_parser],
        help="Command to load and render queue from a manifest file."
    )
    render_manifest.setup_manifest_parser(manifest_parser)

    sequence_parser = sub_commands.add_parser(
        "sequence",
        parents=[global_parser],
        help="Command to render a specific sequence, map, and mrq preset."
    )
    render_sequence.setup_sequence_parser(sequence_parser)

    queue_parser = sub_commands.add_parser(
        "queue",
        parents=[global_parser],
        help="Command to render a movie pipeline queue."
    )
    render_queue.setup_queue_parser(queue_parser)

    render_parser = sub_commands.add_parser(
        "render",
        parents=[global_parser],
        help="Command to render the current loaded render queue."
    )
    render_queue_jobs.setup_render_parser(render_parser)

    # Parse only the arguments we recognize; the editor adds its own flags.
    args, _ = parser.parse_known_args()

    # BUGFIX: when no sub-command is given argparse leaves `func` unset, and
    # `args.func` previously raised an opaque AttributeError. Exit with a
    # usage message instead. (Also removed a stray debug `print(args)`.)
    if not hasattr(args, "func"):
        parser.error(
            "a sub-command is required (manifest, sequence, queue or render)"
        )

    # Execute the selected sub-command with the parsed namespace.
    args.func(args)


# ==== new file: mrq_cli_modes/__init__.py ====
# Copyright Epic Games, Inc. All Rights Reserved

from .render_manifest import render_queue_manifest
from .render_queue import render_queue_asset
from .render_queue_jobs import render_jobs
from .render_sequence import render_current_sequence
from . import utils

__all__ = [
    "render_jobs",
    "render_queue_manifest",
    "render_queue_asset",
    "render_current_sequence",
    "utils",
]


# ==== new file: mrq_cli_modes/render_manifest.py ====
# Copyright Epic Games, Inc.
# Copyright Epic Games, Inc. All Rights Reserved

"""Render-manifest mode for the MRQ commandline interface.

Builds the movie render queue from an on-disk manifest (.utxt) file and,
unless only loading was requested, renders it.
"""

import unreal
from getpass import getuser
from pathlib import Path

from .render_queue_jobs import render_jobs
from .utils import movie_pipeline_queue


def setup_manifest_parser(subparser):
    """Attach the manifest-mode arguments and executor to *subparser*.

    :param subparser: Subparser for processing manifest files
    """
    # Full local path to the movie pipeline manifest file on disk.
    subparser.add_argument(
        "manifest", type=Path, help="Full local path to a MRQ manifest file."
    )

    # By default the manifest is loaded AND rendered; --load stops after
    # loading.
    subparser.add_argument(
        "--load",
        action="store_true",
        help="Only load the contents of the manifest file. "
             "By default the manifest will be loaded and rendered.",
    )

    # Called with the parsed namespace when this sub-command is selected.
    subparser.set_defaults(func=_process_args)


def render_queue_manifest(
    manifest,
    load_only=False,
    shots=None,
    user=None,
    is_remote=False,
    is_cmdline=False,
    remote_batch_name=None,
    remote_job_preset=None,
    executor_instance=None,
    output_dir_override=None,
    output_filename_override=None
):
    """Load a manifest file into the queue and (optionally) render it.

    :param str manifest: Manifest file to render
    :param bool load_only: Only load the manifest file
    :param list shots: Shots to render from the queue
    :param str user: Render user
    :param bool is_remote: Flag to determine if the jobs should be rendered remote
    :param bool is_cmdline: Flag to determine if the job is a commandline job
    :param str remote_batch_name: Batch name for remote renders
    :param str remote_job_preset: Remote render preset library
    :param executor_instance: Movie Pipeline executor instance
    :param str output_dir_override: Movie Pipeline output directory override
    :param str output_filename_override: Movie Pipeline filename format override
    :return: MRQ executor, or ``None`` when only loading
    """
    # The queue subsystem behaves like a singleton, so start from an empty
    # queue.
    movie_pipeline_queue.delete_all_jobs()

    # `manifest` is a plain string path here (see _process_args).
    movie_pipeline_queue.copy_from(
        unreal.MoviePipelineLibrary.load_manifest_file_from_string(manifest)
    )

    if load_only:
        # Load-only mode: optionally shut the editor down, then bail out.
        if is_cmdline:
            unreal.SystemLibrary.quit_editor()
        return None

    # A manifest file is a per-job configuration, so exactly one job is
    # expected in it.
    # Todo: Make sure there are always only one job in the manifest file
    all_jobs = movie_pipeline_queue.get_jobs()
    if not all_jobs:
        raise RuntimeError("There are no jobs in the queue!!")
    render_job = all_jobs[0]

    # Newer MRQ builds can disable whole jobs; make sure ours is enabled.
    # Older builds have no `enabled` attribute — ignore that case
    # (backwards compatibility).
    try:
        if not render_job.enabled:
            render_job.enabled = True
    except AttributeError:
        pass

    # Set the author on the job.
    render_job.author = user or getuser()

    # With an explicit shot list, keep only matching shots enabled;
    # otherwise every shot in the sequence renders.
    if shots:
        for shot in render_job.shot_info:
            wanted = shot.inner_name in shots or (shot.outer_name in shots)
            if wanted:
                shot.enabled = True
            else:
                unreal.log_warning(
                    f"Disabling shot `{shot.inner_name}` from current render job `{render_job.job_name}`"
                )
                shot.enabled = False

    # Hand off to the shared job-rendering helper; it picks the local or
    # remote executor based on `is_remote`.
    try:
        executor = render_jobs(
            is_remote,
            remote_batch_name=remote_batch_name,
            remote_job_preset=remote_job_preset,
            executor_instance=executor_instance,
            is_cmdline=is_cmdline,
            output_dir_override=output_dir_override,
            output_filename_override=output_filename_override
        )
    except Exception as err:
        unreal.log_error(
            f"An error occurred executing the render.\n\tError: {err}"
        )
        raise

    return executor


def _process_args(args):
    """Adapt the parsed CLI namespace and run the manifest render.

    :param args: Parsed Arguments from parser
    """
    # argparse hands us a pathlib.Path; the render function wants a string.
    return render_queue_manifest(
        args.manifest.as_posix(),
        load_only=args.load,
        shots=args.shots,
        user=args.user,
        is_remote=args.remote,
        is_cmdline=args.cmdline,
        remote_batch_name=args.batch_name,
        remote_job_preset=args.deadline_job_preset,
        output_dir_override=args.output_override,
        output_filename_override=args.filename_override
    )


# ==== new file: mrq_cli_modes/render_queue.py ====
# Copyright Epic Games, Inc.
# Copyright Epic Games, Inc. All Rights Reserved

"""Queue-asset mode for the MRQ commandline interface.

Loads a MoviePipelineQueue asset into the active queue and, unless only
loading was requested, renders it.
"""

import unreal

from .render_queue_jobs import render_jobs
from .utils import (
    get_asset_data,
    movie_pipeline_queue,
    update_queue
)


def setup_queue_parser(subparser):
    """Attach the queue-mode arguments and executor to *subparser*.

    :param subparser: Subparser for processing queue assets
    """
    # Name or package path of the queue asset to load.
    subparser.add_argument(
        "queue",
        type=str,
        help="The name or path to a movie pipeline queue."
    )

    # By default the queue asset is loaded AND rendered; --load stops after
    # loading. (BUGFIX: help text read "will loaded and render".)
    subparser.add_argument(
        "--load",
        action="store_true",
        help="Load the contents of the queue asset. By default the queue asset will be loaded and its contents rendered.",
    )

    # Optional subset of jobs to render from the queue.
    subparser.add_argument(
        "--jobs",
        type=str,
        nargs="+",
        help="A list of jobs to execute in the queue. "
             "If no jobs are provided, all jobs in the queue will be rendered.",
    )

    # Called with the parsed namespace when this sub-command is selected.
    subparser.set_defaults(func=_process_args)


def render_queue_asset(
    queue_name,
    only_load=False,
    shots=None,
    jobs=None,
    all_shots=False,
    is_cmdline=False,
    is_remote=False,
    user=None,
    remote_batch_name=None,
    remote_job_preset=None,
    executor_instance=None,
    output_dir_override=None,
    output_filename_override=None
):
    """Render using a Movie Render Queue asset.

    :param str queue_name: The name of the Queue asset
    :param bool only_load: Only load the queue asset. Useful when intermediary
        steps must run before rendering
    :param list shots: Shots to render from the queue
    :param list jobs: The list of jobs to render in the Queue asset
    :param bool all_shots: Flag to render all shots of all jobs in the queue
    :param bool is_cmdline: Flag to determine if the job is a commandline job
    :param bool is_remote: Flag to determine if the jobs should be rendered remote
    :param str user: Render user
    :param str remote_batch_name: Batch name for remote renders
    :param str remote_job_preset: Remote render job preset
    :param executor_instance: Movie Pipeline executor instance
    :param str output_dir_override: Movie Pipeline output directory override
    :param str output_filename_override: Movie Pipeline filename format override
    :return: MRQ executor, or ``None`` when only loading
    """
    # The queue subsystem behaves like a singleton, so start from an empty
    # queue.
    movie_pipeline_queue.delete_all_jobs()

    # Resolve the queue asset by name or package path and copy it into the
    # active queue.
    movie_pipeline_queue.copy_from(
        get_asset_data(queue_name, "MoviePipelineQueue").get_asset()
    )

    # Load-only mode: optionally shut the editor down, then bail out.
    if only_load:

        if is_cmdline:
            unreal.SystemLibrary.quit_editor()

        return None

    if not movie_pipeline_queue.get_jobs():
        # Make sure we have jobs in the queue to work with.
        raise RuntimeError("There are no jobs in the queue!!")

    # Only touch job/shot enabled states when a selection was requested;
    # otherwise the queue renders in its current loaded state.
    # (Simplified from `all_shots or (any([shots, jobs]))`.)
    if all_shots or shots or jobs:
        update_queue(
            jobs=jobs,
            shots=shots,
            all_shots=all_shots,
            user=user
        )

    # Hand off to the shared job-rendering helper (local or remote).
    # BUGFIX: the previous `except Exception: raise` silently dropped the
    # error logging that every sibling mode performs; log before re-raising.
    try:
        executor = render_jobs(
            is_remote,
            remote_batch_name=remote_batch_name,
            remote_job_preset=remote_job_preset,
            is_cmdline=is_cmdline,
            executor_instance=executor_instance,
            output_dir_override=output_dir_override,
            output_filename_override=output_filename_override
        )
    except Exception as err:
        unreal.log_error(
            f"An error occurred executing the render.\n\tError: {err}"
        )
        raise

    return executor


def _process_args(args):
    """Adapt the parsed CLI namespace and run the queue-asset render.

    :param args: Parsed Arguments from parser
    """
    return render_queue_asset(
        args.queue,
        only_load=args.load,
        shots=args.shots,
        jobs=args.jobs,
        all_shots=args.all_shots,
        is_remote=args.remote,
        is_cmdline=args.cmdline,
        user=args.user,
        remote_batch_name=args.batch_name,
        remote_job_preset=args.deadline_job_preset,
        output_dir_override=args.output_override,
        output_filename_override=args.filename_override
    )


# ==== new file: mrq_cli_modes/render_queue_jobs.py ====
# Copyright Epic Games, Inc.
# Copyright Epic Games, Inc. All Rights Reserved

"""Render mode for the MRQ commandline interface.

Renders whatever is currently loaded in the movie render queue, applying
optional output overrides first.
"""

import unreal

from .utils import (
    movie_pipeline_queue,
    execute_render,
    setup_remote_render_jobs,
    update_render_output
)


def setup_render_parser(subparser):
    """Attach the render-mode executor to *subparser*.

    :param subparser: Subparser for rendering the current queue
    """
    # Called with the parsed namespace when this sub-command is selected.
    subparser.set_defaults(func=_process_args)


def render_jobs(
    is_remote=False,
    is_cmdline=False,
    executor_instance=None,
    remote_batch_name=None,
    remote_job_preset=None,
    output_dir_override=None,
    output_filename_override=None
):
    """Render the current state of the queue.

    :param bool is_remote: Is this a remote render
    :param bool is_cmdline: Is this a commandline render
    :param executor_instance: Movie Pipeline Executor instance
    :param str remote_batch_name: Batch name for remote renders
    :param str remote_job_preset: Remote render job preset
    :param str output_dir_override: Movie Pipeline output directory override
    :param str output_filename_override: Movie Pipeline filename format override
    :return: MRQ executor
    """
    queued_jobs = movie_pipeline_queue.get_jobs()

    # Make sure we have jobs in the queue to work with.
    if not queued_jobs:
        raise RuntimeError("There are no jobs in the queue!!")

    for job in queued_jobs:

        # Apply output directory / filename overrides, when given, before
        # touching the output settings.
        if output_dir_override or output_filename_override:
            update_render_output(
                job,
                output_dir=output_dir_override,
                output_filename=output_filename_override
            )

        # Flush files to disk per shot; required for the
        # OnIndividualShotFinishedCallback to get called.
        output_setting = job.get_configuration().find_setting_by_class(
            unreal.MoviePipelineOutputSetting
        )
        output_setting.flush_disk_writes_per_shot = True

    # Remote renders need the batch name / job preset stamped on every job.
    if is_remote:
        setup_remote_render_jobs(
            remote_batch_name,
            remote_job_preset,
            movie_pipeline_queue.get_jobs(),
        )

    # Kick off the render with the local or remote executor.
    try:
        executor = execute_render(
            is_remote,
            executor_instance=executor_instance,
            is_cmdline=is_cmdline,
        )
    except Exception as err:
        unreal.log_error(
            f"An error occurred executing the render.\n\tError: {err}"
        )
        raise

    return executor


def _process_args(args):
    """Adapt the parsed CLI namespace and render the current queue.

    :param args: Parsed Arguments from parser
    """
    return render_jobs(
        is_remote=args.remote,
        is_cmdline=args.cmdline,
        remote_batch_name=args.batch_name,
        remote_job_preset=args.deadline_job_preset,
        output_dir_override=args.output_override,
        output_filename_override=args.filename_override
    )


# ==== new file: mrq_cli_modes/render_sequence.py ====
# Copyright Epic Games, Inc.
# Copyright Epic Games, Inc. All Rights Reserved

"""Sequence mode for the MRQ commandline interface.

Creates a single render job from a level sequence, a map and an MRQ preset,
then renders it.
"""

import unreal
from getpass import getuser

from .render_queue_jobs import render_jobs
from .utils import (
    movie_pipeline_queue,
    project_settings,
    get_asset_data
)


def setup_sequence_parser(subparser):
    """Attach the sequence-mode arguments and executor to *subparser*.

    :param subparser: Subparser for processing custom sequences
    """
    # The level sequence and the map form the context for the other
    # arguments.
    subparser.add_argument(
        "sequence", type=str, help="The level sequence that will be rendered."
    )
    subparser.add_argument(
        "map",
        type=str,
        help="The map the level sequence will be loaded with for rendering.",
    )

    # Preset holding the render configuration for this job.
    subparser.add_argument(
        "mrq_preset",
        type=str,
        help="The MRQ preset used to render the current job.",
    )

    # Called with the parsed namespace when this sub-command is selected.
    subparser.set_defaults(func=_process_args)


def render_current_sequence(
    sequence_name,
    sequence_map,
    mrq_preset,
    user=None,
    shots=None,
    is_remote=False,
    is_cmdline=False,
    remote_batch_name=None,
    remote_job_preset=None,
    executor_instance=None,
    output_dir_override=None,
    output_filename_override=None
):
    """Render a sequence with a map and an MRQ preset.

    :param str sequence_name: Sequence to render
    :param str sequence_map: Map to load the sequence with
    :param str mrq_preset: MRQ preset for rendering the sequence
    :param str user: Render user
    :param list shots: Shots to render
    :param bool is_remote: Flag to determine if the job should be executed remotely
    :param bool is_cmdline: Flag to determine if the render was executed via commandline
    :param str remote_batch_name: Remote render batch name
    :param str remote_job_preset: Deadline job Preset Library
    :param executor_instance: Movie Pipeline executor instance
    :param str output_dir_override: Movie Pipeline output directory override
    :param str output_filename_override: Movie Pipeline filename format override
    :return: MRQ executor
    """
    # The queue subsystem behaves like a singleton, so start from an empty
    # queue.
    movie_pipeline_queue.delete_all_jobs()

    # Allocate a fresh job using the project's default executor job class.
    job_class_ref = unreal.SystemLibrary.conv_soft_class_path_to_soft_class_ref(
        project_settings.default_executor_job
    )
    render_job = movie_pipeline_queue.allocate_new_job(job_class_ref)

    # Set the author on the job.
    render_job.author = user or getuser()

    # Resolve and assign the level sequence.
    sequence_data_asset = get_asset_data(sequence_name, "LevelSequence")
    unreal.log(f"Creating render job for `{sequence_data_asset.asset_name}`")
    render_job.job_name = sequence_data_asset.asset_name

    unreal.log(
        f"Setting the job sequence to `{sequence_data_asset.asset_name}`"
    )
    render_job.sequence = sequence_data_asset.to_soft_object_path()

    # Resolve and assign the map.
    map_data_asset = get_asset_data(sequence_map, "World")
    unreal.log(f"Setting the job map to `{map_data_asset.asset_name}`")
    render_job.map = map_data_asset.to_soft_object_path()

    # Resolve and assign the render configuration preset.
    mrq_preset_data_asset = get_asset_data(
        mrq_preset, "MoviePipelineMasterConfig"
    )
    unreal.log(
        f"Setting the movie pipeline preset to `{mrq_preset_data_asset.asset_name}`"
    )
    render_job.set_configuration(mrq_preset_data_asset.get_asset())

    # Newer MRQ builds can disable whole jobs; make sure ours is enabled.
    # Older builds have no `enabled` attribute — ignore that case
    # (backwards compatibility).
    try:
        if not render_job.enabled:
            render_job.enabled = True
    except AttributeError:
        pass

    # With an explicit shot list, keep only matching shots enabled;
    # otherwise every shot in the sequence renders.
    if shots:
        for shot in render_job.shot_info:
            wanted = shot.inner_name in shots or (shot.outer_name in shots)
            if wanted:
                shot.enabled = True
            else:
                unreal.log_warning(
                    f"Disabling shot `{shot.inner_name}` from current render job `{render_job.job_name}`"
                )
                shot.enabled = False

    # Hand off to the shared job-rendering helper (local or remote).
    try:
        executor = render_jobs(
            is_remote,
            remote_batch_name=remote_batch_name,
            remote_job_preset=remote_job_preset,
            is_cmdline=is_cmdline,
            executor_instance=executor_instance,
            output_dir_override=output_dir_override,
            output_filename_override=output_filename_override
        )
    except Exception as err:
        unreal.log_error(
            f"An error occurred executing the render.\n\tError: {err}"
        )
        raise

    return executor


def _process_args(args):
    """Adapt the parsed CLI namespace and run the sequence render.

    :param args: Parsed Arguments from parser
    """
    return render_current_sequence(
        args.sequence,
        args.map,
        args.mrq_preset,
        user=args.user,
        shots=args.shots,
        is_remote=args.remote,
        is_cmdline=args.cmdline,
        remote_batch_name=args.batch_name,
        remote_job_preset=args.deadline_job_preset,
        output_dir_override=args.output_override,
        output_filename_override=args.filename_override
    )


# ==== new file: mrq_cli_modes/utils.py ====
# Copyright Epic Games, Inc.
All Rights Reserved + +import unreal + +from getpass import getuser + +# Get a render queue +pipeline_subsystem = unreal.get_editor_subsystem( + unreal.MoviePipelineQueueSubsystem +) + +# Get the project settings +project_settings = unreal.get_default_object( + unreal.MovieRenderPipelineProjectSettings +) + +# Get the pipeline queue +movie_pipeline_queue = pipeline_subsystem.get_queue() + +pipeline_executor = None + + +def get_executor_instance(is_remote): + """ + Method to return an instance of a render executor + + :param bool is_remote: Flag to use the local or remote executor class + :return: Executor instance + """ + is_soft_class_object = True + # Convert the SoftClassPath into a SoftClassReference. + # local executor class from the project settings + try: + class_ref = unreal.SystemLibrary.conv_soft_class_path_to_soft_class_ref( + project_settings.default_local_executor + ) + # For Backwards compatibility. Older version returned a class object from + # the project settings + except TypeError: + class_ref = project_settings.default_local_executor + is_soft_class_object = False + + if is_remote: + try: + # Get the remote executor class + class_ref = ( + unreal.SystemLibrary.conv_soft_class_path_to_soft_class_ref( + project_settings.default_remote_executor + ) + ) + except TypeError: + class_ref = project_settings.default_remote_executor + is_soft_class_object = False + + if not class_ref: + raise RuntimeError( + "Failed to get a class reference to the default executor from the " + "project settings. Check the logs for more details." 
+ ) + + if is_soft_class_object: + # Get the executor class as this is required to get an instance of + # the executor + executor_class = unreal.SystemLibrary.load_class_asset_blocking( + class_ref + ) + else: + executor_class = class_ref + + global pipeline_executor + pipeline_executor = unreal.new_object(executor_class) + + return pipeline_executor + + +def execute_render(is_remote=False, executor_instance=None, is_cmdline=False): + """ + Starts a render + + :param bool is_remote: Flag to use the local or remote executor class + :param executor_instance: Executor instance used for rendering + :param bool is_cmdline: Flag to determine if the render was executed from a commandline. + """ + + if not executor_instance: + executor_instance = get_executor_instance(is_remote) + + if is_cmdline: + setup_editor_exit_callback(executor_instance) + + # Start the Render + unreal.log("MRQ job started...") + unreal.log(f"Is remote render: {is_remote}") + + pipeline_subsystem.render_queue_with_executor_instance(executor_instance) + + return executor_instance + + +def setup_editor_exit_callback(executor_instance): + """ + Setup callbacks for when you need to close the editor after a render + + :param executor_instance: Movie Pipeline executor instance + """ + + unreal.log("Executed job from commandline, setting up shutdown callback..") + + # add a callable to the executor to be executed when the pipeline is done rendering + executor_instance.on_executor_finished_delegate.add_callable( + shutdown_editor + ) + # add a callable to the executor to be executed when the pipeline fails to render + executor_instance.on_executor_errored_delegate.add_callable( + executor_failed_callback + ) + + +def shutdown_editor(movie_pipeline=None, results=None): + """ + This method shutdown the editor + """ + unreal.log("Rendering is complete! 
def executor_failed_callback(executor, pipeline, is_fatal, error):
    """
    Callback executed when a job fails in the editor.

    Logs the error and quits the editor so the farm task terminates.

    :param executor: Movie Pipeline executor that raised the error
    :param pipeline: Movie Pipeline that was executing
    :param bool is_fatal: True when the error is unrecoverable
    :param str error: Error message reported by the pipeline
    """
    unreal.log_error(
        f"An error occurred while executing a render.\n\tError: {error}"
    )

    unreal.SystemLibrary.quit_editor()


def get_asset_data(name_or_path, asset_class):
    """
    Get the asset data for the asset name or path based on its class.

    :param str name_or_path: asset name or package name
    :param str asset_class: Asset class filter to use when looking for
        assets in the registry
    :raises RuntimeError: When no matching asset exists in the registry
    :return: Asset package if it exists
    """
    # Get all the specified class assets in the project.
    # This is the only mechanism we can think of at the moment to allow
    # shorter path names in the commandline interface. This will allow users
    # to only provide the asset name or the package path in the commandline
    # interface based on the assumption that all assets are unique
    asset_registry = unreal.AssetRegistryHelpers.get_asset_registry()

    # If the asset registry is still loading, wait for it to finish
    if asset_registry.is_loading_assets():
        unreal.log_warning("Asset Registry is loading, waiting to complete...")
        asset_registry.wait_for_completion()

        unreal.log("Asset Registry load complete!")

    assets = asset_registry.get_assets(
        unreal.ARFilter(class_names=[asset_class])
    )

    # This lookup could potentially be very slow
    for asset in assets:
        # If a package name is provided lookup the package path. If a
        # package startswith a "/" this signifies a content package. Content
        # packages can either be Game or plugin. Game content paths start
        # with "/Game" and plugin contents startswith /
        if name_or_path.startswith("/"):
            # Reconstruct the package path into a package name. eg.
            # /my/package_name.package_name -> /my/package_name
            name_or_path = name_or_path.split(".")[0]
            if asset.package_name == name_or_path:
                return asset
        else:
            if asset.asset_name == name_or_path:
                return asset

    raise RuntimeError(f"`{name_or_path}` could not be found!")


def setup_remote_render_jobs(batch_name, job_preset, render_jobs):
    """
    This function sets up a render job with the options for a remote render.
    This is configured currently for deadline jobs.

    :param str batch_name: Remote render batch name
    :param str job_preset: Job Preset to use for job details
    :param list render_jobs: The list of render jobs to apply the args to
    :raises RuntimeError: When a job exposes batch name/preset properties
        but no value was provided for them
    """

    unreal.log("Setting up Remote render executor.. ")

    # Update the settings on the render job.
    # Currently, this is designed to work with deadline

    # Make sure we have the relevant attribute on the jobs. This remote cli
    # setup can be used with out-of-process rendering and not just deadline.
    unset_job_properties = []
    for job in render_jobs:
        if hasattr(job, "batch_name") and not batch_name:
            unset_job_properties.append(job.name)

        if hasattr(job, "job_preset") and not job_preset:
            unset_job_properties.append(job.name)

    # If we find a deadline property on the job, and it's not set, raise an
    # error
    if unset_job_properties:
        raise RuntimeError(
            "These jobs did not have a batch name, preset name or preset "
            "library set. This is a requirement for deadline remote rendering. "
            "{jobs}".format(
                jobs="\n".join(unset_job_properties))
        )

    for render_job in render_jobs:
        render_job.batch_name = batch_name
        render_job.job_preset = get_asset_data(
            job_preset,
            "DeadlineJobPreset"
        ).get_asset()


def set_job_state(job, enable=False):
    """
    This method sets the state on a current job to enabled or disabled

    :param job: MoviePipeline job to enable/disable
    :param bool enable: Flag to determine if a job should be enabled or not
    """

    if enable:
        # Check for an enable attribute on the job and if not move along.
        # Note: `Enabled` was added to MRQ that allows disabling all shots in
        # a job. This also enables backwards compatibility.
        try:
            if not job.enabled:
                job.enabled = True
        except AttributeError:
            # Legacy implementations assume the presence of a job means it
            # is enabled
            pass
        # Explicit return so the disable branch below cannot immediately
        # undo the enable we just performed.
        return

    try:
        if job.enabled:
            job.enabled = False
    except AttributeError:
        # If the attribute is not available, go through and disable all the
        # associated shots. This behaves like a disabled job
        for shot in job.shot_info:
            unreal.log_warning(
                f"Disabling shot `{shot.inner_name}` from current render job `{job.job_name}`"
            )
            shot.enabled = False


def update_render_output(job, output_dir=None, output_filename=None):
    """
    Updates the output directory and filename on a render job

    :param job: MRQ job
    :param str output_dir: Output directory for renders
    :param str output_filename: Output filename
    """

    # Get the job output settings
    output_setting = job.get_configuration().find_setting_by_class(
        unreal.MoviePipelineOutputSetting
    )

    if output_dir:
        new_output_dir = unreal.DirectoryPath()
        new_output_dir.set_editor_property(
            "path",
            output_dir
        )
        unreal.log_warning(
            f"Overriding output directory! New output directory is `{output_dir}`."
        )
        output_setting.output_directory = new_output_dir

    if output_filename:
        # f-prefix added: previously `{output_filename}` was logged
        # literally because the literal was not an f-string.
        unreal.log_warning(
            f"Overriding filename format! New format is `{output_filename}`."
        )

        output_setting.file_name_format = output_filename


def update_queue(
    jobs=None,
    shots=None,
    all_shots=False,
    user=None,
):
    """
    This function configures the global queue's jobs/shots for rendering

    :param list jobs: MRQ jobs to render
    :param list shots: Shots to render from jobs
    :param bool all_shots: Flag for rendering all shots
    :param str user: Render user
    """

    # Iterate over all the jobs and make sure the jobs we want to
    # render are enabled.
    # All jobs that are not going to be rendered will be disabled if the
    # job enabled attribute is not set or their shots disabled.
    # The expectation is, If a job name is specified, we want to render the
    # current state of that job.
    # If a shot list is specified, we want to only render that shot alongside
    # any other whole jobs (job states) that are explicitly specified,
    # else other jobs or shots that are not
    # needed are disabled
    for job in movie_pipeline_queue.get_jobs():
        enable_job = False

        # Get a list of jobs to enable.
        # This will enable jobs in their current queue state awaiting other
        # modifications if shots are provided, if only the job name is
        # specified, the job will be rendered in its current state
        if jobs and (job.job_name in jobs):
            enable_job = True

        # If we are told to render all shots, enable all shots for all jobs
        if all_shots:
            for shot in job.shot_info:
                shot.enabled = True

            # set the user for the current job
            job.author = user or getuser()

            # Set the job to enabled and move on to the next job
            set_job_state(job, enable=True)

            continue

        # If we have a list of shots, go through the shots associated
        # with this job, enable the shots that need to be rendered and
        # disable the others
        if shots and (not enable_job):
            for shot in job.shot_info:
                if shot.inner_name in shots or (shot.outer_name in shots):
                    shot.enabled = True
                    enable_job = True
                else:
                    unreal.log_warning(
                        f"Disabling shot `{shot.inner_name}` from current render job `{job.job_name}`"
                    )
                    shot.enabled = False

        if enable_job:
            # Set the author on the job
            job.author = user or getuser()

        # Set the state of the job by enabling or disabling it.
        set_job_state(job, enable=enable_job)
# Copyright Epic Games, Inc. All Rights Reserved

import os
import argparse
import json

import unreal

from deadline_rpc import BaseRPC

from mrq_cli_modes import (
    render_queue_manifest,
    render_current_sequence,
    render_queue_asset,
    utils,
)


class MRQRender(BaseRPC):
    """
    Class to execute deadline MRQ renders using RPC.

    Reads the render payload (queue asset name, serialized pipeline or
    sequence data) from the Deadline job's extra-info keys via the RPC
    proxy and dispatches to the matching mrq_cli_modes renderer.
    """

    def __init__(self, *args, **kwargs):
        """
        Constructor
        """
        super(MRQRender, self).__init__(*args, **kwargs)

        self._render_cmd = ["mrq_cli.py"]

        # Per-job caches of the task data pulled from the Deadline proxy
        self._shot_data = None
        self._queue = None
        self._manifest = None
        self._sequence_data = None

    def _get_queue(self):
        """
        Get the MRQ queue asset name from the Deadline job (cached).

        :return: MRQ queue asset name
        """
        if not self._queue:
            self._queue = self.proxy.get_job_extra_info_key_value("queue_name")

        return self._queue

    def _get_sequence_data(self):
        """
        Get sequence render data from the Deadline job (cached).

        :return: Sequence data (JSON string) or None
        """
        if not self._sequence_data:
            self._sequence_data = self.proxy.get_job_extra_info_key_value(
                "sequence_render"
            )

        return self._sequence_data

    def _get_serialized_pipeline(self):
        """
        Get the serialized pipeline from Deadline and write it to a
        temporary manifest file.

        :return: Path to the temp manifest file, or None when the job has
            no serialized pipeline attached
        """
        if not self._manifest:
            serialized_pipeline = self.proxy.get_job_extra_info_key_value(
                "serialized_pipeline"
            )
            if not serialized_pipeline:
                return

            unreal.log(
                f"Executing Serialized Pipeline: `{serialized_pipeline}`"
            )

            # create temp manifest folder
            movieRenderPipeline_dir = os.path.join(
                unreal.SystemLibrary.get_project_saved_directory(),
                "MovieRenderPipeline",
                "TempManifests",
            )

            if not os.path.exists(movieRenderPipeline_dir):
                os.makedirs(movieRenderPipeline_dir)

            # create manifest file
            manifest_file = unreal.Paths.create_temp_filename(
                movieRenderPipeline_dir,
                prefix='TempManifest',
                extension='.utxt')

            unreal.log(f"Saving Manifest file `{manifest_file}`")

            # Dump the manifest data into the manifest file
            with open(manifest_file, "w") as manifest:
                manifest.write(serialized_pipeline)

            self._manifest = manifest_file

        return self._manifest

    def execute(self):
        """
        Starts the render execution
        """

        # shots are listed as a dictionary of task id -> shotnames
        # i.e {"0": "my_new_shot"} or {"20": "shot_1,shot_2,shot_4"}

        # Get the task data and cache it
        if not self._shot_data:
            self._shot_data = json.loads(
                self.proxy.get_job_extra_info_key_value("shot_info")
            )

        # Get any output overrides
        output_dir = self.proxy.get_job_extra_info_key_value(
            "output_directory_override"
        )

        # Resolve any path mappings in the directory name. The server expects
        # a list of paths, but we only ever expect one. So wrap it in a list
        # if we have an output directory
        if output_dir:
            output_dir = self.proxy.check_path_mappings([output_dir])
            output_dir = output_dir[0]

        # Get the filename format
        filename_format = self.proxy.get_job_extra_info_key_value(
            "filename_format_override"
        )

        # Resolve any path mappings in the filename. The server expects
        # a list of paths, but we only ever expect one. So wrap it in a list
        if filename_format:
            filename_format = self.proxy.check_path_mappings([filename_format])
            filename_format = filename_format[0]

        # get the shots for the current task
        current_task_data = self._shot_data.get(str(self.current_task_id), None)

        if not current_task_data:
            self.proxy.fail_render("There are no task data to execute!")
            return

        shots = current_task_data.split(",")

        # Dispatch priority: queue asset, then serialized pipeline, then
        # sequence/level/preset triple.
        if self._get_queue():
            return self.render_queue(
                self._get_queue(),
                shots,
                output_dir_override=output_dir if output_dir else None,
                filename_format_override=filename_format if filename_format else None
            )

        if self._get_serialized_pipeline():
            return self.render_serialized_pipeline(
                self._get_serialized_pipeline(),
                shots,
                output_dir_override=output_dir if output_dir else None,
                filename_format_override=filename_format if filename_format else None
            )

        if self._get_sequence_data():
            render_data = json.loads(self._get_sequence_data())
            sequence = render_data.get("sequence_name")
            level = render_data.get("level_name")
            mrq_preset = render_data.get("mrq_preset_name")
            return self.render_sequence(
                sequence,
                level,
                mrq_preset,
                shots,
                output_dir_override=output_dir if output_dir else None,
                filename_format_override=filename_format if filename_format else None
            )

    def render_queue(
        self,
        queue_path,
        shots,
        output_dir_override=None,
        filename_format_override=None
    ):
        """
        Executes a render from a queue

        :param str queue_path: Name/path of the queue asset
        :param list shots: Shots to render
        :param str output_dir_override: Movie Pipeline output directory
        :param str filename_format_override: Movie Pipeline filename format override
        """
        unreal.log(f"Executing Queue asset `{queue_path}`")
        unreal.log(f"Rendering shots: {shots}")

        # Get an executor instance
        executor = self._get_executor_instance()

        # Set executor callbacks

        # Set shot finished callbacks
        executor.on_individual_shot_work_finished_delegate.add_callable(
            self._on_individual_shot_finished_callback
        )

        # Set executor finished callbacks
        executor.on_executor_finished_delegate.add_callable(
            self._on_job_finished
        )
        executor.on_executor_errored_delegate.add_callable(self._on_job_failed)

        # Render queue with executor
        render_queue_asset(
            queue_path,
            shots=shots,
            user=self.proxy.get_job_user(),
            executor_instance=executor,
            output_dir_override=output_dir_override,
            output_filename_override=filename_format_override
        )

    def render_serialized_pipeline(
        self,
        manifest_file,
        shots,
        output_dir_override=None,
        filename_format_override=None
    ):
        """
        Executes a render using a manifest file

        :param str manifest_file: serialized pipeline used to render a manifest file
        :param list shots: Shots to render
        :param str output_dir_override: Movie Pipeline output directory
        :param str filename_format_override: Movie Pipeline filename format override
        """
        unreal.log(f"Rendering shots: {shots}")

        # Get an executor instance
        executor = self._get_executor_instance()

        # Set executor callbacks

        # Set shot finished callbacks
        executor.on_individual_shot_work_finished_delegate.add_callable(
            self._on_individual_shot_finished_callback
        )

        # Set executor finished callbacks
        executor.on_executor_finished_delegate.add_callable(
            self._on_job_finished
        )
        executor.on_executor_errored_delegate.add_callable(self._on_job_failed)

        render_queue_manifest(
            manifest_file,
            shots=shots,
            user=self.proxy.get_job_user(),
            executor_instance=executor,
            output_dir_override=output_dir_override,
            output_filename_override=filename_format_override
        )

    def render_sequence(
        self,
        sequence,
        level,
        mrq_preset,
        shots,
        output_dir_override=None,
        filename_format_override=None
    ):
        """
        Executes a render using a sequence level and map

        :param str sequence: Level Sequence name
        :param str level: Level
        :param str mrq_preset: MovieRenderQueue preset
        :param list shots: Shots to render
        :param str output_dir_override: Movie Pipeline output directory
        :param str filename_format_override: Movie Pipeline filename format override
        """
        unreal.log(
            f"Executing sequence `{sequence}` with map `{level}` "
            f"and mrq preset `{mrq_preset}`"
        )
        unreal.log(f"Rendering shots: {shots}")

        # Get an executor instance
        executor = self._get_executor_instance()

        # Set executor callbacks

        # Set shot finished callbacks
        executor.on_individual_shot_work_finished_delegate.add_callable(
            self._on_individual_shot_finished_callback
        )

        # Set executor finished callbacks
        executor.on_executor_finished_delegate.add_callable(
            self._on_job_finished
        )
        executor.on_executor_errored_delegate.add_callable(self._on_job_failed)

        render_current_sequence(
            sequence,
            level,
            mrq_preset,
            shots=shots,
            user=self.proxy.get_job_user(),
            executor_instance=executor,
            output_dir_override=output_dir_override,
            output_filename_override=filename_format_override
        )

    @staticmethod
    def _get_executor_instance():
        """
        Gets an instance of the movie pipeline executor

        :return: Movie Pipeline Executor instance
        """
        return utils.get_executor_instance(False)

    def _on_individual_shot_finished_callback(self, shot_params):
        """
        Callback to execute when a shot is done rendering

        :param shot_params: Movie pipeline shot params
        """
        unreal.log("Executing On individual shot callback")

        # Since MRQ cannot parse certain parameters/arguments till an actual
        # render is complete (e.g. local version numbers), we will use this as
        # an opportunity to update the deadline proxy on the actual frame
        # details that were rendered

        file_patterns = set()

        # Iterate over all the shots in the shot list (typically one shot as
        # this callback is executed) on a shot by shot bases.
        for shot in shot_params.shot_data:
            for pass_identifier in shot.render_pass_data:

                # only get the first file
                paths = shot.render_pass_data[pass_identifier].file_paths

                # make sure we have paths to iterate on
                if len(paths) < 1:
                    continue

                # we only need the ext from the first file
                ext = os.path.splitext(paths[0])[1].replace(".", "")

                # Make sure we actually have an extension to use
                if not ext:
                    continue

                # Get the current job output settings
                output_settings = shot_params.job.get_configuration().find_or_add_setting_by_class(
                    unreal.MoviePipelineOutputSetting
                )

                resolve_params = unreal.MoviePipelineFilenameResolveParams()

                # Set the camera name from the shot data
                resolve_params.camera_name_override = shot_params.shot_data[
                    0
                ].shot.inner_name

                # set the shot name from the shot data
                resolve_params.shot_name_override = shot_params.shot_data[
                    0
                ].shot.outer_name

                # Get the zero padding configuration
                resolve_params.zero_pad_frame_number_count = (
                    output_settings.zero_pad_frame_numbers
                )

                # Update the formatting of frame numbers based on the padding.
                # Deadline uses # (* padding) to display the file names in a job
                resolve_params.file_name_format_overrides[
                    "frame_number"
                ] = "#" * int(output_settings.zero_pad_frame_numbers)

                # Update the extension
                resolve_params.file_name_format_overrides["ext"] = ext

                # Set the job on the resolver
                resolve_params.job = shot_params.job

                # Set the initialization time on the resolver
                resolve_params.initialization_time = (
                    unreal.MoviePipelineLibrary.get_job_initialization_time(
                        shot_params.pipeline
                    )
                )

                # Set the shot overrides
                resolve_params.shot_override = shot_params.shot_data[0].shot

                combined_path = unreal.Paths.combine(
                    [
                        output_settings.output_directory.path,
                        output_settings.file_name_format,
                    ]
                )

                # Resolve the paths
                # The returned values are a tuple with the resolved paths as the
                # first index. Get the paths and add it to a list
                (
                    path,
                    _,
                ) = unreal.MoviePipelineLibrary.resolve_filename_format_arguments(
                    combined_path, resolve_params
                )

                # Make sure we are getting the right type from resolved
                # arguments
                if isinstance(path, str):
                    # Sanitize the paths
                    path = os.path.normpath(path).replace("\\", "/")
                    file_patterns.add(path)

                elif isinstance(path, list):

                    file_patterns.update(
                        set(
                            [
                                os.path.normpath(p).replace("\\", "/")
                                for p in path
                            ]
                        )
                    )

                else:
                    raise RuntimeError(
                        f"Expected the shot file paths to be a "
                        f"string or list but got: {type(path)}"
                    )

        if file_patterns:
            unreal.log(f'Updating remote filenames: {", ".join(file_patterns)}')

            # Update the paths on the deadline job
            self.proxy.update_job_output_filenames(list(file_patterns))

    def _on_job_finished(self, executor=None, success=None):
        """
        Callback to execute on executor finished

        :param executor: Movie Pipeline executor that finished (unused)
        :param success: Success flag from the executor (unused)
        """
        # TODO: add the ability to set the output directory for the task
        unreal.log(f"Task {self.current_task_id} complete!")
        self.task_complete = True

    def _on_job_failed(self, executor, pipeline, is_fatal, error):
        """
        Callback to execute on job failed

        :param executor: Movie Pipeline executor that errored
        :param pipeline: Movie Pipeline that was executing
        :param is_fatal: True when the error is unrecoverable
        :param error: Error message reported by the pipeline
        """
        unreal.log_error(f"Is fatal job error: {is_fatal}")
        unreal.log_error(
            f"An error occurred executing task `{self.current_task_id}`: \n\t{error}"
        )
        self.proxy.fail_render(error)


if __name__ == "__main__":
    parser = argparse.ArgumentParser(
        description="This parser is used to run an mrq render with rpc"
    )
    parser.add_argument(
        "--port", type=int, default=None, help="Port number for rpc server"
    )
    parser.add_argument(
        "--verbose", action="store_true", help="Enable verbose logging"
    )

    arguments = parser.parse_args()

    # BaseRPC drives execution from its constructor (connects back to the
    # Deadline plugin over RPC and waits for tasks).
    MRQRender(port=arguments.port, verbose=arguments.verbose)
# Copyright Epic Games, Inc. All Rights Reserved

# Built-in
import argparse
import re
from pathlib import Path
from getpass import getuser
from collections import OrderedDict

# Internal
from deadline_service import get_global_deadline_service_instance
from deadline_job import DeadlineJob
from deadline_menus import DeadlineToolBarMenu
from deadline_utils import get_deadline_info_from_preset

# Third Party
import unreal


# Editor Utility Widget path
# NOTE: This is very fragile and can break if naming or pathing changes
EDITOR_UTILITY_WIDGET = "/MoviePipelineDeadline/Widgets/QueueAssetSubmitter"


def _launch_queue_asset_submitter():
    """
    Callback to execute to launch the queue asset submitter
    """
    unreal.log("Launching queue submitter.")

    submitter_widget = unreal.EditorAssetLibrary.load_asset(EDITOR_UTILITY_WIDGET)

    # Get editor subsystem
    subsystem = unreal.get_editor_subsystem(unreal.EditorUtilitySubsystem)

    # Spawn the submitter widget
    subsystem.spawn_and_register_tab(submitter_widget)


def register_menu_action():
    """
    Creates the toolbar menu
    """

    if not _validate_euw_asset_exists():
        unreal.log_warning(
            f"EUW `{EDITOR_UTILITY_WIDGET}` does not exist in the Asset registry!"
        )
        return

    toolbar = DeadlineToolBarMenu()

    toolbar.register_submenu(
        "SubmitMRQAsset",
        _launch_queue_asset_submitter,
        label_name="Submit Movie Render Queue Asset",
        description="Submits a Movie Render Queue asset to Deadline"
    )


def _validate_euw_asset_exists():
    """
    Make sure our reference editor utility widget exists in
    the asset registry.

    :returns: True when the widget asset is present on disk, else False
    :rtype: bool
    """

    asset_registry = unreal.AssetRegistryHelpers.get_asset_registry()
    asset_data = asset_registry.get_assets_by_package_name(
        EDITOR_UTILITY_WIDGET,
        include_only_on_disk_assets=True
    )

    return bool(asset_data)


def _execute_submission(args):
    """
    Creates and submits the queue asset as a job to Deadline

    :param args: Commandline args
    """

    unreal.log("Executing job submission")

    job_info, plugin_info = get_deadline_info_from_preset(
        job_preset=unreal.load_asset(args.submission_job_preset)
    )

    # Due to some odd behavior in how Unreal passes string to the argparse,
    # it adds extra quotes to the string, so we will strip the quotes out to get
    # a single string representation.
    batch_name = args.batch_name[0].strip('"')

    # Update the Job Batch Name
    job_info["BatchName"] = batch_name

    # Set the name of the job if one is not set
    if not job_info.get("Name"):
        job_info["Name"] = Path(args.queue_asset).stem

    # Set the Author of the job
    if not job_info.get("UserName"):
        job_info["UserName"] = getuser()

    # Arguments to pass to the executable.
    command_args = []

    # Append all of our inherited command line arguments from the editor.
    in_process_executor_settings = unreal.get_default_object(
        unreal.MoviePipelineInProcessExecutorSettings
    )
    inherited_cmds = in_process_executor_settings.inherited_command_line_arguments

    # Sanitize the commandline by removing any execcmds that may
    # have passed through the commandline.
    # We remove the execcmds because, in some cases, users may execute a
    # script that is local to their editor build for some automated
    # workflow but this is not ideal on the farm. We will expect all
    # custom startup commands for rendering to go through the `Start
    # Command` in the MRQ settings.
    # NOTE: raw string and named group restored — `(?P-execcmds...` is not
    # valid regex syntax and would raise re.error at submission time.
    inherited_cmds = re.sub(
        r".(?P<cmds>-execcmds=[\w\W]+[\'\"])",
        "",
        inherited_cmds
    )

    command_args.extend(inherited_cmds.split(" "))
    command_args.extend(
        in_process_executor_settings.additional_command_line_arguments.split(
            " "
        )
    )

    # Build out custom queue command that will be used to render the queue on
    # the farm.
    queue_cmds = [
        "py",
        "mrq_cli.py",
        "queue",
        str(args.queue_asset),
        "--remote",
        "--cmdline",
        "--batch_name",
        batch_name,
        "--deadline_job_preset",
        str(args.remote_job_preset)
    ]

    command_args.extend(
        [
            "-nohmd",
            "-windowed",
            "-ResX=1280",
            "-ResY=720",
            '-execcmds="{cmds}"'.format(cmds=" ".join(queue_cmds))
        ]
    )

    # Append the commandline args from the deadline plugin info
    command_args.extend(plugin_info.get("CommandLineArguments", "").split(" "))

    # Sanitize the commandline args
    command_args = [arg for arg in command_args if arg not in [None, "", " "]]

    # Remove all duplicates from the command args
    full_cmd_args = " ".join(list(OrderedDict.fromkeys(command_args)))

    # Get the current launched project file
    if unreal.Paths.is_project_file_path_set():
        # NOTE(review): comment in original said "trim down to just
        # Game.uproject" but this resolves to the FULL project path — confirm
        # which one the Deadline plugin expects.
        game_name_or_project_file = (
            unreal.Paths.convert_relative_path_to_full(
                unreal.Paths.get_project_file_path()
            )
        )

    else:
        raise RuntimeError(
            "Failed to get a project name. Please specify a project!"
        )

    if not plugin_info.get("ProjectFile"):
        project_file = plugin_info.get("ProjectFile", game_name_or_project_file)
        plugin_info["ProjectFile"] = project_file

    # "CommandLineMode" tells Deadline to not use an
    # interactive process to execute the job but launch it like a shell
    # command and wait for the process to exit. `--cmdline` in our
    # commandline arguments will tell the editor to shut down when the job is
    # complete
    plugin_info.update(
        {
            "CommandLineArguments": full_cmd_args,
            "CommandLineMode": "true"
        }
    )

    # Create a Deadline job from the selected preset library
    deadline_job = DeadlineJob(job_info, plugin_info)

    deadline_service = get_global_deadline_service_instance()

    # Submit the Deadline Job
    job_id = deadline_service.submit_job(deadline_job)

    unreal.log(f"Deadline job submitted. JobId: {job_id}")


if __name__ == "__main__":
    unreal.log("Executing queue submitter action")

    parser = argparse.ArgumentParser(
        description="Submits queue asset to Deadline",
        add_help=False,
    )
    parser.add_argument(
        "--batch_name",
        type=str,
        nargs='+',
        help="Deadline Batch Name"
    )
    parser.add_argument(
        "--submission_job_preset",
        type=str,
        help="Submitter Deadline Job Preset"
    )
    parser.add_argument(
        "--remote_job_preset",
        type=str,
        help="Remote Deadline Job Preset"
    )
    parser.add_argument(
        "--queue_asset",
        type=str,
        help="Movie Pipeline Queue Asset"
    )

    parser.set_defaults(func=_execute_submission)

    # Parse the arguments and execute the function callback
    arguments = parser.parse_args()
    arguments.func(arguments)
# Copyright Epic Games, Inc. All Rights Reserved

# Built-In
import os
import re
import json
import traceback
from collections import OrderedDict

# External
import unreal

from deadline_service import get_global_deadline_service_instance
from deadline_job import DeadlineJob
from deadline_utils import get_deadline_info_from_preset


@unreal.uclass()
class MoviePipelineDeadlineRemoteExecutor(unreal.MoviePipelineExecutorBase):
    """
    This class defines the editor implementation for Deadline (what happens when you
    press 'Render (Remote)'), which is in charge of taking a movie queue from the UI
    and processing it into something Deadline can handle.
    """

    # The queue we are working on, null if no queue has been provided.
    pipeline_queue = unreal.uproperty(unreal.MoviePipelineQueue)
    job_ids = unreal.uproperty(unreal.Array(str))

    # A MoviePipelineExecutor implementation must override this.
    @unreal.ufunction(override=True)
    def execute(self, pipeline_queue):
        """
        This is called when the user presses Render (Remote) in the UI. We will
        split the queue up into multiple jobs. Each job will be submitted to
        deadline separately, with each shot within the job split into one Deadline
        task per shot.

        :param pipeline_queue: MoviePipelineQueue from the editor UI
        """

        unreal.log(f"Asked to execute Queue: {pipeline_queue}")
        unreal.log(f"Queue has {len(pipeline_queue.get_jobs())} jobs")

        # Don't try to process empty/null Queues, no need to send them to
        # Deadline.
        if not pipeline_queue or (not pipeline_queue.get_jobs()):
            self.on_executor_finished_impl()
            return

        # The user must save their work and check it in so that Deadline
        # can sync it.
        dirty_packages = []
        dirty_packages.extend(
            unreal.EditorLoadingAndSavingUtils.get_dirty_content_packages()
        )
        dirty_packages.extend(
            unreal.EditorLoadingAndSavingUtils.get_dirty_map_packages()
        )

        # Sometimes the dialog will return `False`
        # even when there are no packages to save. so we are
        # being explicit about the packages we need to save
        if dirty_packages:
            if not unreal.EditorLoadingAndSavingUtils.save_dirty_packages_with_dialog(
                True, True
            ):
                message = (
                    "One or more jobs in the queue have an unsaved map/content. "
                    "{packages} "
                    "Please save and check-in all work before submission.".format(
                        packages="\n".join(dirty_packages)
                    )
                )

                unreal.log_error(message)
                unreal.EditorDialog.show_message(
                    "Unsaved Maps/Content", message, unreal.AppMsgType.OK
                )
                self.on_executor_finished_impl()
                return

        # Make sure all the maps in the queue exist on disk somewhere,
        # unsaved maps can't be loaded on the remote machine, and it's common
        # to have the wrong map name if you submit without loading the map.
        has_valid_map = (
            unreal.MoviePipelineEditorLibrary.is_map_valid_for_remote_render(
                pipeline_queue.get_jobs()
            )
        )
        if not has_valid_map:
            message = (
                "One or more jobs in the queue have an unsaved map as "
                "their target map. "
                "These unsaved maps cannot be loaded by an external process, "
                "and the render has been aborted."
            )
            unreal.log_error(message)
            unreal.EditorDialog.show_message(
                "Unsaved Maps", message, unreal.AppMsgType.OK
            )
            self.on_executor_finished_impl()
            return

        self.pipeline_queue = pipeline_queue

        deadline_settings = unreal.get_default_object(
            unreal.MoviePipelineDeadlineSettings
        )

        # Arguments to pass to the executable. This can be modified by settings
        # in the event a setting needs to be applied early.
        # In the format of -foo -bar
        # commandLineArgs = ""
        command_args = []

        # Append all of our inherited command line arguments from the editor.
        in_process_executor_settings = unreal.get_default_object(
            unreal.MoviePipelineInProcessExecutorSettings
        )
        inherited_cmds = in_process_executor_settings.inherited_command_line_arguments

        # Sanitize the commandline by removing any execcmds that may
        # have passed through the commandline.
        # We remove the execcmds because, in some cases, users may execute a
        # script that is local to their editor build for some automated
        # workflow but this is not ideal on the farm. We will expect all
        # custom startup commands for rendering to go through the `Start
        # Command` in the MRQ settings.
        inherited_cmds = re.sub(
            ".*(?P<cmds>-execcmds=[\s\S]+[\'\"])",
            "",
            inherited_cmds
        )

        command_args.extend(inherited_cmds.split(" "))
        command_args.extend(
            in_process_executor_settings.additional_command_line_arguments.split(
                " "
            )
        )

        command_args.extend(
            ["-nohmd", "-windowed", f"-ResX=1280", f"-ResY=720"]
        )

        # Get the project level preset
        project_preset = deadline_settings.default_job_preset

        # Get the job and plugin info string.
        # Note:
        # Sometimes a project level default may not be set,
        # so if this returns an empty dictionary, that is okay
        # as we primarily care about the job level preset.
        # Catch any exceptions here and continue
        # NOTE(review): if this raises, project_job_info/project_plugin_info
        # stay unbound and the DeadlineJob construction below raises
        # NameError — confirm the preset is guaranteed to parse.
        try:
            project_job_info, project_plugin_info = get_deadline_info_from_preset(job_preset=project_preset)

        except Exception:
            pass

        deadline_service = get_global_deadline_service_instance()

        for job in self.pipeline_queue.get_jobs():

            unreal.log(f"Submitting Job `{job.job_name}` to Deadline...")

            try:
                # Create a Deadline job object with the default project level
                # job info and plugin info
                deadline_job = DeadlineJob(project_job_info, project_plugin_info)

                deadline_job_id = self.submit_job(
                    job, deadline_job, command_args, deadline_service
                )

            except Exception as err:
                unreal.log_error(
                    f"Failed to submit job `{job.job_name}` to Deadline, aborting render. \n\tError: {str(err)}"
                )
                unreal.log_error(traceback.format_exc())
                self.on_executor_errored_impl(None, True, str(err))
                unreal.EditorDialog.show_message(
                    "Submission Result",
                    f"Failed to submit job `{job.job_name}` to Deadline with error: {str(err)}. "
                    f"See log for more details.",
                    unreal.AppMsgType.OK,
                )
                self.on_executor_finished_impl()
                return

            if not deadline_job_id:
                # NOTE(review): this early return leaves the executor in a
                # never-finished state (on_executor_finished_impl is not
                # called) — confirm this is intended.
                message = (
                    f"A problem occurred submitting `{job.job_name}`. "
                    f"Either the job doesn't have any data to submit, "
                    f"or an error occurred getting the Deadline JobID. "
                    f"This job status would not be reflected in the UI. "
                    f"Check the logs for more details."
                )
                unreal.log_warning(message)
                unreal.EditorDialog.show_message(
                    "Submission Result", message, unreal.AppMsgType.OK
                )
                return

            else:
                unreal.log(f"Deadline JobId: {deadline_job_id}")
                self.job_ids.append(deadline_job_id)

                # Store the Deadline JobId in our job (the one that exists in
                # the queue, not the duplicate) so we can match up Movie
                # Pipeline jobs with status updates from Deadline.
                job.user_data = deadline_job_id

        # Now that we've sent a job to Deadline, we're going to request a status
        # update on them so that they transition from "Ready" to "Queued" or
        # their actual status in Deadline.
        # self.request_job_status_update(deadline_service)

        message = (
            f"Successfully submitted {len(self.job_ids)} jobs to Deadline. JobIds: {', '.join(self.job_ids)}. "
            f"\nPlease use Deadline Monitor to track render job statuses"
        )
        unreal.log(message)

        unreal.EditorDialog.show_message(
            "Submission Result", message, unreal.AppMsgType.OK
        )

        # Set the executor to finished
        self.on_executor_finished_impl()

    @unreal.ufunction(override=True)
    def is_rendering(self):
        """
        Always False: submission forwards work to Deadline, so the local
        queue is never blocked by an in-flight remote render.
        """
        # Because we forward unfinished jobs onto another service when the
        # button is pressed, they can always submit what is in the queue and
        # there's no need to block the queue.
        # A MoviePipelineExecutor implementation must override this. If you
        # override a ufunction from a base class you don't specify the return
        # type or parameter types.
        return False

    def submit_job(self, job, deadline_job, command_args, deadline_service):
        """
        Submit a new Job to Deadline

        :param job: Queued job to submit
        :param deadline_job: Deadline job object
        :param list[str] command_args: Commandline arguments to configure for the Deadline Job
        :param deadline_service: An instance of the deadline service object
        :returns: Deadline Job ID
        :rtype: str
        """

        # Get the Job Info and plugin Info
        # If we have a preset set on the job, get the deadline submission details
        try:
            job_info, plugin_info = get_deadline_info_from_preset(job_preset_struct=job.get_deadline_job_preset_struct_with_overrides())

        # Fail the submission if any errors occur
        except Exception as err:
            raise RuntimeError(
                f"An error occurred getting the deadline job and plugin "
                f"details. \n\tError: {err} "
            )

        # check for required fields in pluginInfo
        if "Executable" not in plugin_info:
            raise RuntimeError("An error occurred formatting the Plugin Info string. \n\tMissing \"Executable\" key")
        elif not plugin_info["Executable"]:
            raise RuntimeError(f"An error occurred formatting the Plugin Info string. \n\tExecutable value cannot be empty")
        if "ProjectFile" not in plugin_info:
            raise RuntimeError("An error occurred formatting the Plugin Info string. \n\tMissing \"ProjectFile\" key")
        elif not plugin_info["ProjectFile"]:
            raise RuntimeError(f"An error occurred formatting the Plugin Info string. \n\tProjectFile value cannot be empty")

        # Update the job info with overrides from the UI
        if job.batch_name:
            job_info["BatchName"] = job.batch_name

        if hasattr(job, "comment") and not job_info.get("Comment"):
            job_info["Comment"] = job.comment

        if not job_info.get("Name") or job_info["Name"] == "Untitled":
            job_info["Name"] = job.job_name

        if job.author:
            job_info["UserName"] = job.author

        if unreal.Paths.is_project_file_path_set():
            # Trim down to just "Game.uproject" instead of absolute path.
            game_name_or_project_file = (
                unreal.Paths.convert_relative_path_to_full(
                    unreal.Paths.get_project_file_path()
                )
            )

        else:
            raise RuntimeError(
                "Failed to get a project name. Please set a project!"
            )

        # Create a new queue with only this job in it and save it to disk,
        # then load it, so we can send it with the REST API
        new_queue = unreal.MoviePipelineQueue()
        new_job = new_queue.duplicate_job(job)

        duplicated_queue, manifest_path = unreal.MoviePipelineEditorLibrary.save_queue_to_manifest_file(
            new_queue
        )

        # Convert the queue to text (load the serialized json from disk) so we
        # can send it via deadline, and deadline will write the queue to the
        # local machines on job startup.
        serialized_pipeline = unreal.MoviePipelineEditorLibrary.convert_manifest_file_to_string(
            manifest_path
        )

        # Loop through our settings in the job and let them modify the command
        # line arguments/params.
        new_job.get_configuration().initialize_transient_settings()
        # Look for our Game Override setting to pull the game mode to start
        # with. We start with this game mode even on a blank map to override
        # the project default from kicking in.
        game_override_class = None

        out_url_params = []
        out_command_line_args = []
        out_device_profile_cvars = []
        out_exec_cmds = []
        for setting in new_job.get_configuration().get_all_settings():

            out_url_params, out_command_line_args, out_device_profile_cvars, out_exec_cmds = setting.build_new_process_command_line_args(
                out_url_params,
                out_command_line_args,
                out_device_profile_cvars,
                out_exec_cmds,
            )

            # Set the game override
            if setting.get_class() == unreal.MoviePipelineGameOverrideSetting.static_class():
                game_override_class = setting.game_mode_override

        # This triggers the editor to start looking for render jobs when it
        # finishes loading.
+ out_exec_cmds.append("py mrq_rpc.py") + + # Convert the arrays of command line args, device profile cvars, + # and exec cmds into actual commands for our command line. + command_args.extend(out_command_line_args) + + if out_device_profile_cvars: + # -dpcvars="arg0,arg1,..." + command_args.append( + '-dpcvars="{dpcvars}"'.format( + dpcvars=",".join(out_device_profile_cvars) + ) + ) + + if out_exec_cmds: + # -execcmds="cmd0,cmd1,..." + command_args.append( + '-execcmds="{cmds}"'.format(cmds=",".join(out_exec_cmds)) + ) + + # Add support for telling the remote process to wait for the + # asset registry to complete synchronously + command_args.append("-waitonassetregistry") + + # Build a shot-mask from this sequence, to split into the appropriate + # number of tasks. Remove any already-disabled shots before we + # generate a list, otherwise we make unneeded tasks which get sent to + # machines + shots_to_render = [] + for shot_index, shot in enumerate(new_job.shot_info): + if not shot.enabled: + unreal.log( + f"Skipped submitting shot {shot_index} in {job.job_name} " + f"to server due to being already disabled!" 
+ ) + else: + shots_to_render.append(shot.outer_name) + + # If there are no shots enabled, + # "these are not the droids we are looking for", move along ;) + # We will catch this later and deal with it + if not shots_to_render: + unreal.log_warning("No shots enabled in shot mask, not submitting.") + return + + # Divide the job to render by the chunk size + # i.e {"O": "my_new_shot"} or {"0", "shot_1,shot_2,shot_4"} + chunk_size = int(job_info.get("ChunkSize", 1)) + shots = {} + frame_list = [] + for index in range(0, len(shots_to_render), chunk_size): + + shots[str(index)] = ",".join(shots_to_render[index : index + chunk_size]) + + frame_list.append(str(index)) + + job_info["Frames"] = ",".join(frame_list) + + # Get the current index of the ExtraInfoKeyValue pair, we will + # increment the index, so we do not stomp other settings + extra_info_key_indexs = set() + for key in job_info.keys(): + if key.startswith("ExtraInfoKeyValue"): + _, index = key.split("ExtraInfoKeyValue") + extra_info_key_indexs.add(int(index)) + + # Get the highest number in the index list and increment the number + # by one + current_index = max(extra_info_key_indexs) + 1 if extra_info_key_indexs else 0 + + # Put the serialized Queue into the Job data but hidden from + # Deadline UI + job_info[f"ExtraInfoKeyValue{current_index}"] = f"serialized_pipeline={serialized_pipeline}" + + # Increment the index + current_index += 1 + + # Put the shot info in the job extra info keys + job_info[f"ExtraInfoKeyValue{current_index}"] = f"shot_info={json.dumps(shots)}" + current_index += 1 + + # Set the job output directory override on the deadline job + if hasattr(new_job, "output_directory_override"): + if new_job.output_directory_override.path: + job_info[f"ExtraInfoKeyValue{current_index}"] = f"output_directory_override={new_job.output_directory_override.path}" + + current_index += 1 + + # Set the job filename format override on the deadline job + if hasattr(new_job, "filename_format_override"): + if 
new_job.filename_format_override: + job_info[f"ExtraInfoKeyValue{current_index}"] = f"filename_format_override={new_job.filename_format_override}" + + current_index += 1 + + # Build the command line arguments the remote machine will use. + # The Deadline plugin will provide the executable since it is local to + # the machine. It will also write out queue manifest to the correct + # location relative to the Saved folder + + # Get the current commandline args from the plugin info + plugin_info_cmd_args = [plugin_info.get("CommandLineArguments", "")] + + if not plugin_info.get("ProjectFile"): + project_file = plugin_info.get("ProjectFile", game_name_or_project_file) + plugin_info["ProjectFile"] = project_file + + # This is the map included in the plugin to boot up to. + project_cmd_args = [ + f"MoviePipelineEntryMap?game={game_override_class.get_path_name()}" + ] + + # Combine all the compiled arguments + full_cmd_args = project_cmd_args + command_args + plugin_info_cmd_args + + # Remove any duplicates in the commandline args and convert to a string + full_cmd_args = " ".join(list(OrderedDict.fromkeys(full_cmd_args))).strip() + + unreal.log(f"Deadline job command line args: {full_cmd_args}") + + # Update the plugin info with the commandline arguments + plugin_info.update( + { + "CommandLineArguments": full_cmd_args, + "CommandLineMode": "false", + } + ) + + deadline_job.job_info = job_info + deadline_job.plugin_info = plugin_info + + # Submit the deadline job + return deadline_service.submit_job(deadline_job) + + # TODO: For performance reasons, we will skip updating the UI and request + # that users use a different mechanism for checking on job statuses. + # This will be updated once we have a performant solution. 
diff --git a/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/MoviePipelineDeadline/Content/Widgets/QueueAssetSubmitter.uasset b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/MoviePipelineDeadline/Content/Widgets/QueueAssetSubmitter.uasset new file mode 100644 index 0000000000..442152b22d Binary files /dev/null and b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/MoviePipelineDeadline/Content/Widgets/QueueAssetSubmitter.uasset differ diff --git a/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/MoviePipelineDeadline/MoviePipelineDeadline.uplugin b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/MoviePipelineDeadline/MoviePipelineDeadline.uplugin new file mode 100644 index 0000000000..6a6558fb6e --- /dev/null +++ b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/MoviePipelineDeadline/MoviePipelineDeadline.uplugin @@ -0,0 +1,44 @@ +{ + "FileVersion": 3, + "Version": 1, + "VersionName": "1.0", + "FriendlyName": "Movie Pipeline Deadline", + "Description": "A plugin which adds support for distributed rendering via Deadline to the Movie Render Pipeline.", + "Category": "Epic Deadline", + "CreatedBy": "Epic Games, Inc.", + "CreatedByURL" : "http://epicgames.com", + "DocsURL": "", + "MarketplaceURL": "", + "SupportURL": "", + "EnabledByDefault" : false, + "CanContainContent": true, + "IsBetaVersion": true, + "IsExperimentalVersion": false, + "Installed": false, + "Plugins": + [ + { + "Name": "PythonScriptPlugin", + "Enabled": true + }, + { + "Name": "MovieRenderPipeline", + "Enabled": true + }, + { + "Name": "EditorScriptingUtilities", + "Enabled": true + }, + { + "Name": "UnrealDeadlineService", + "Enabled": true + } + ], + "Modules": + [ + { + "Name": "MoviePipelineDeadline", + "Type" : "UncookedOnly" + } + ] +} \ No newline at end of file diff --git 
a/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/MoviePipelineDeadline/Resources/Icon128.png b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/MoviePipelineDeadline/Resources/Icon128.png new file mode 100644 index 0000000000..1231d4aad4 Binary files /dev/null and b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/MoviePipelineDeadline/Resources/Icon128.png differ diff --git a/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/MoviePipelineDeadline/Source/MoviePipelineDeadline/MoviePipelineDeadline.Build.cs b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/MoviePipelineDeadline/Source/MoviePipelineDeadline/MoviePipelineDeadline.Build.cs new file mode 100644 index 0000000000..8ed4b7c041 --- /dev/null +++ b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/MoviePipelineDeadline/Source/MoviePipelineDeadline/MoviePipelineDeadline.Build.cs @@ -0,0 +1,32 @@ +// Copyright Epic Games, Inc. All Rights Reserved. 
+ +using UnrealBuildTool; + +public class MoviePipelineDeadline : ModuleRules +{ + public MoviePipelineDeadline(ReadOnlyTargetRules Target) : base(Target) + { + ShortName = "DMP"; + + PrivateDependencyModuleNames.AddRange( + new string[] { + "Core", + "CoreUObject", + "DeadlineService", + "DeveloperSettings", + "Engine", + "InputCore", + "MovieRenderPipelineCore", + "PropertyEditor", + "RenderCore", + "Slate", + "SlateCore" + } + ); + + PublicDependencyModuleNames.AddRange( + new string[] { + } + ); + } +} diff --git a/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/MoviePipelineDeadline/Source/MoviePipelineDeadline/Private/DeadlineJobPresetCustomization.cpp b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/MoviePipelineDeadline/Source/MoviePipelineDeadline/Private/DeadlineJobPresetCustomization.cpp new file mode 100644 index 0000000000..44202404da --- /dev/null +++ b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/MoviePipelineDeadline/Source/MoviePipelineDeadline/Private/DeadlineJobPresetCustomization.cpp @@ -0,0 +1,338 @@ +// Copyright Epic Games, Inc. All Rights Reserved. 
+ +#include "DeadlineJobPresetCustomization.h" + +#include "MoviePipelineDeadlineExecutorJob.h" +#include "MoviePipelineDeadlineSettings.h" + +#include "DetailWidgetRow.h" +#include "IDetailChildrenBuilder.h" +#include "IDetailGroup.h" +#include "Widgets/Input/SCheckBox.h" +#include "Widgets/Layout/SBox.h" + +class SEyeCheckBox : public SCompoundWidget +{ +public: + SLATE_BEGIN_ARGS( SEyeCheckBox ){} + + SLATE_END_ARGS() + + void Construct(const FArguments& InArgs, const FName& InPropertyPath) + { + ChildSlot + [ + SNew(SBox) + .Visibility(EVisibility::Visible) + .HAlign(HAlign_Right) + .WidthOverride(28) + .HeightOverride(20) + .Padding(4, 0) + [ + SAssignNew(CheckBoxPtr, SCheckBox) + .Style(&FAppStyle::Get().GetWidgetStyle("ToggleButtonCheckbox")) + .Visibility_Lambda([this]() + { + return CheckBoxPtr.IsValid() && !CheckBoxPtr->IsChecked() ? EVisibility::Visible : IsHovered() ? EVisibility::Visible : EVisibility::Hidden; + }) + .CheckedImage(FAppStyle::Get().GetBrush("Icons.Visible")) + .CheckedHoveredImage(FAppStyle::Get().GetBrush("Icons.Visible")) + .CheckedPressedImage(FAppStyle::Get().GetBrush("Icons.Visible")) + .UncheckedImage(FAppStyle::Get().GetBrush("Icons.Hidden")) + .UncheckedHoveredImage(FAppStyle::Get().GetBrush("Icons.Hidden")) + .UncheckedPressedImage(FAppStyle::Get().GetBrush("Icons.Hidden")) + .ToolTipText(NSLOCTEXT("FDeadlineJobPresetLibraryCustomization", "VisibleInMoveRenderQueueToolTip", "If true this property will be visible for overriding from Movie Render Queue.")) + .OnCheckStateChanged_Lambda([InPropertyPath](ECheckBoxState CheckType) + { + if (UMoviePipelineDeadlineSettings* Settings = + GetMutableDefault()) + { + if (CheckType == ECheckBoxState::Unchecked) + { + Settings->AddPropertyToHideInMovieRenderQueue( + InPropertyPath); + } + else + { + Settings-> + RemovePropertyToHideInMovieRenderQueue( + InPropertyPath); + } + } + }) + .IsChecked_Lambda([InPropertyPath]() + { + return 
FDeadlineJobPresetCustomization::IsPropertyHiddenInMovieRenderQueue(InPropertyPath) + ? ECheckBoxState::Unchecked + : ECheckBoxState::Checked; + }) + ] + ]; + } + + TSharedPtr CheckBoxPtr; +}; + +TSharedRef FDeadlineJobPresetCustomization::MakeInstance() +{ + return MakeShared(); +} + +void FDeadlineJobPresetCustomization::CustomizeChildren(TSharedRef StructHandle, + IDetailChildrenBuilder& ChildBuilder, IPropertyTypeCustomizationUtils& CustomizationUtils) +{ + TArray OuterObjects; + StructHandle->GetOuterObjects(OuterObjects); + + if (OuterObjects.Num() == 0) + { + return; + } + + const TWeakObjectPtr OuterObject = OuterObjects[0]; + if (!OuterObject.IsValid()) + { + return; + } + + UMoviePipelineDeadlineExecutorJob* OuterJob = Cast(OuterObject); + + TMap CreatedCategories; + + const FName StructName(StructHandle->GetProperty()->GetFName()); + + if (OuterJob) + { + IDetailGroup& BaseCategoryGroup = ChildBuilder.AddGroup(StructName, StructHandle->GetPropertyDisplayName()); + CreatedCategories.Add(StructName, &BaseCategoryGroup); + } + + // For each map member and each struct member in the map member value + uint32 NumChildren; + StructHandle->GetNumChildren(NumChildren); + + // For each struct member + for (uint32 ChildIndex = 0; ChildIndex < NumChildren; ++ChildIndex) + { + const TSharedRef ChildHandle = StructHandle->GetChildHandle(ChildIndex).ToSharedRef(); + + // Skip properties that are hidden so we don't end up creating empty categories in the job details + if (OuterJob && IsPropertyHiddenInMovieRenderQueue(*ChildHandle->GetProperty()->GetPathName())) + { + continue; + } + + IDetailGroup* GroupToUse = nullptr; + if (const FString* PropertyCategoryString = ChildHandle->GetProperty()->FindMetaData(TEXT("Category"))) + { + FName PropertyCategoryName(*PropertyCategoryString); + + if (IDetailGroup** FoundCategory = CreatedCategories.Find(PropertyCategoryName)) + { + GroupToUse = *FoundCategory; + } + else + { + if (OuterJob) + { + IDetailGroup& NewGroup = 
CreatedCategories.FindChecked(StructName)->AddGroup(PropertyCategoryName, FText::FromName(PropertyCategoryName), true); + GroupToUse = CreatedCategories.Add(PropertyCategoryName, &NewGroup); + } + else + { + IDetailGroup& NewGroup = ChildBuilder.AddGroup(PropertyCategoryName, FText::FromName(PropertyCategoryName)); + NewGroup.ToggleExpansion(true); + GroupToUse = CreatedCategories.Add(PropertyCategoryName, &NewGroup); + } + } + } + + IDetailPropertyRow& PropertyRow = GroupToUse->AddPropertyRow(ChildHandle); + + if (OuterJob) + { + CustomizeStructChildrenInMovieRenderQueue(PropertyRow, OuterJob); + } + else + { + CustomizeStructChildrenInAssetDetails(PropertyRow); + } + } + + // Force expansion of all categories + for (const TTuple& Pair : CreatedCategories) + { + if (Pair.Value) + { + Pair.Value->ToggleExpansion(true); + } + } +} + +void FDeadlineJobPresetCustomization::CustomizeStructChildrenInAssetDetails(IDetailPropertyRow& PropertyRow) const +{ + TSharedPtr NameWidget; + TSharedPtr ValueWidget; + FDetailWidgetRow Row; + PropertyRow.GetDefaultWidgets(NameWidget, ValueWidget, Row); + + PropertyRow.CustomWidget(true) + .NameContent() + .MinDesiredWidth(Row.NameWidget.MinWidth) + .MaxDesiredWidth(Row.NameWidget.MaxWidth) + .HAlign(HAlign_Fill) + [ + NameWidget.ToSharedRef() + ] + .ValueContent() + .MinDesiredWidth(Row.ValueWidget.MinWidth) + .MaxDesiredWidth(Row.ValueWidget.MaxWidth) + .VAlign(VAlign_Center) + [ + ValueWidget.ToSharedRef() + ] + .ExtensionContent() + [ + SNew(SEyeCheckBox, *PropertyRow.GetPropertyHandle()->GetProperty()->GetPathName()) + ]; +} + +void FDeadlineJobPresetCustomization::CustomizeStructChildrenInMovieRenderQueue( + IDetailPropertyRow& PropertyRow, UMoviePipelineDeadlineExecutorJob* Job) const +{ + TSharedPtr NameWidget; + TSharedPtr ValueWidget; + FDetailWidgetRow Row; + PropertyRow.GetDefaultWidgets(NameWidget, ValueWidget, Row); + + const FName PropertyPath = *PropertyRow.GetPropertyHandle()->GetProperty()->GetPathName(); + + 
ValueWidget->SetEnabled(TAttribute::CreateLambda([Job, PropertyPath]() + { + if (!Job) + { + // Return true so by default all properties are enabled for overrides + return true; + } + + return Job->IsPropertyRowEnabledInMovieRenderJob(PropertyPath); + })); + + PropertyRow + .OverrideResetToDefault( + FResetToDefaultOverride::Create( + FIsResetToDefaultVisible::CreateStatic( &FDeadlineJobPresetCustomization::IsResetToDefaultVisibleOverride, Job), + FResetToDefaultHandler::CreateStatic(&FDeadlineJobPresetCustomization::ResetToDefaultOverride, Job))) + .CustomWidget(true) + .NameContent() + .MinDesiredWidth(Row.NameWidget.MinWidth) + .MaxDesiredWidth(Row.NameWidget.MaxWidth) + .HAlign(HAlign_Fill) + [ + SNew(SHorizontalBox) + + SHorizontalBox::Slot() + .AutoWidth() + .Padding(4, 0) + [ + SNew(SCheckBox) + .IsChecked_Lambda([Job, PropertyPath]() + { + if (!Job) + { + // Return Checked so by default all properties are enabled for overrides + return ECheckBoxState::Checked; + } + + return Job->IsPropertyRowEnabledInMovieRenderJob(PropertyPath) ? ECheckBoxState::Checked : ECheckBoxState::Unchecked; + }) + .OnCheckStateChanged_Lambda([Job, PropertyPath](const ECheckBoxState NewState) + { + if (!Job) + { + return; + } + + return Job->SetPropertyRowEnabledInMovieRenderJob( + PropertyPath, NewState == ECheckBoxState::Checked ? 
true : false); + }) + ] + + SHorizontalBox::Slot() + [ + NameWidget.ToSharedRef() + ] + ] + .ValueContent() + .MinDesiredWidth(Row.ValueWidget.MinWidth) + .MaxDesiredWidth(Row.ValueWidget.MaxWidth) + .VAlign(VAlign_Center) + [ + ValueWidget.ToSharedRef() + ]; +} + +bool FDeadlineJobPresetCustomization::IsPropertyHiddenInMovieRenderQueue(const FName& InPropertyPath) +{ + if (const UMoviePipelineDeadlineSettings* Settings = GetDefault()) + { + return Settings->GetIsPropertyHiddenInMovieRenderQueue(InPropertyPath); + } + return false; +} + +bool FDeadlineJobPresetCustomization::IsPropertyRowEnabledInMovieRenderJob(const FName& InPropertyPath, + UMoviePipelineDeadlineExecutorJob* Job) +{ + return Job && Job->IsPropertyRowEnabledInMovieRenderJob(InPropertyPath); +} + +bool GetPresetValueAsString(const FProperty* PropertyPtr, UMoviePipelineDeadlineExecutorJob* Job, FString& OutFormattedValue) +{ + if (!PropertyPtr || !Job) + { + return false; + } + + UDeadlineJobPreset* SelectedJobPreset = Job->JobPreset; + if (!SelectedJobPreset) + { + return false; + } + + const void* ValuePtr = PropertyPtr->ContainerPtrToValuePtr(&SelectedJobPreset->JobPresetStruct); + PropertyPtr->ExportText_Direct(OutFormattedValue, ValuePtr, ValuePtr, nullptr, PPF_None); + return true; +} + +bool FDeadlineJobPresetCustomization::IsResetToDefaultVisibleOverride( + TSharedPtr PropertyHandle, UMoviePipelineDeadlineExecutorJob* Job) +{ + if (!PropertyHandle || !Job) + { + return true; + } + + if (FString DefaultValueAsString; GetPresetValueAsString(PropertyHandle->GetProperty(), Job, DefaultValueAsString)) + { + FString CurrentValueAsString; + PropertyHandle->GetValueAsFormattedString(CurrentValueAsString); + + return CurrentValueAsString != DefaultValueAsString; + } + + // If this fails, just show it by default + return true; +} + +void FDeadlineJobPresetCustomization::ResetToDefaultOverride( + TSharedPtr PropertyHandle, UMoviePipelineDeadlineExecutorJob* Job) +{ + if (!PropertyHandle || !Job) + { + 
return; + } + + if (FString DefaultValueAsString; GetPresetValueAsString(PropertyHandle->GetProperty(), Job, DefaultValueAsString)) + { + PropertyHandle->SetValueFromFormattedString(DefaultValueAsString); + } +} diff --git a/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/MoviePipelineDeadline/Source/MoviePipelineDeadline/Private/MoviePipelineDeadlineExecutorJob.cpp b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/MoviePipelineDeadline/Source/MoviePipelineDeadline/Private/MoviePipelineDeadlineExecutorJob.cpp new file mode 100644 index 0000000000..94b6ed625c --- /dev/null +++ b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/MoviePipelineDeadline/Source/MoviePipelineDeadline/Private/MoviePipelineDeadlineExecutorJob.cpp @@ -0,0 +1,102 @@ +// Copyright Epic Games, Inc. All Rights Reserved. + +#include "MoviePipelineDeadlineExecutorJob.h" + +#include "MoviePipelineDeadlineSettings.h" + +UMoviePipelineDeadlineExecutorJob::UMoviePipelineDeadlineExecutorJob() + : UMoviePipelineExecutorJob() +{ + // If a Job Preset is not already defined, assign the default preset + if (!JobPreset) + { + if (const UMoviePipelineDeadlineSettings* MpdSettings = GetDefault()) + { + if (const TObjectPtr DefaultPreset = MpdSettings->DefaultJobPreset) + { + JobPreset = DefaultPreset; + } + } + } +} + +bool UMoviePipelineDeadlineExecutorJob::IsPropertyRowEnabledInMovieRenderJob(const FName& InPropertyPath) const +{ + if (const FPropertyRowEnabledInfo* Match = Algo::FindByPredicate(EnabledPropertyOverrides, + [&InPropertyPath](const FPropertyRowEnabledInfo& Info) + { + return Info.PropertyPath == InPropertyPath; + })) + { + return Match->bIsEnabled; + } + + return false; +} + +void UMoviePipelineDeadlineExecutorJob::SetPropertyRowEnabledInMovieRenderJob(const FName& InPropertyPath, bool bInEnabled) +{ + if (FPropertyRowEnabledInfo* Match = Algo::FindByPredicate(EnabledPropertyOverrides, + 
[&InPropertyPath](const FPropertyRowEnabledInfo& Info) + { + return Info.PropertyPath == InPropertyPath; + })) + { + Match->bIsEnabled = bInEnabled; + } + else + { + EnabledPropertyOverrides.Add({InPropertyPath, bInEnabled}); + } +} + +void UMoviePipelineDeadlineExecutorJob::PostEditChangeProperty(FPropertyChangedEvent& PropertyChangedEvent) +{ + // Check if we changed the job Preset and update the override details + if (const FName PropertyName = PropertyChangedEvent.GetPropertyName(); PropertyName == "JobPreset") + { + if (const UDeadlineJobPreset* SelectedJobPreset = this->JobPreset) + { + this->PresetOverrides = SelectedJobPreset->JobPresetStruct; + } + } +} + +FDeadlineJobPresetStruct UMoviePipelineDeadlineExecutorJob::GetDeadlineJobPresetStructWithOverrides() const +{ + // Start with preset properties + FDeadlineJobPresetStruct ReturnValue = JobPreset->JobPresetStruct; + + const UMoviePipelineDeadlineSettings* Settings = GetDefault<UMoviePipelineDeadlineSettings>(); + + for (TFieldIterator<FProperty> PropIt(FDeadlineJobPresetStruct::StaticStruct()); PropIt; ++PropIt) + { + const FProperty* Property = *PropIt; + if (!Property) + { + continue; + } + + const FName PropertyPath = *Property->GetPathName(); + + // Skip hidden properties (just return the preset value) + if (Settings && Settings->GetIsPropertyHiddenInMovieRenderQueue(PropertyPath)) + { + continue; + } + + // Also skip if it's shown but not enabled + if (!IsPropertyRowEnabledInMovieRenderJob(PropertyPath)) + { + continue; + } + + // Get Override Property Value + const void* OverridePropertyValuePtr = Property->ContainerPtrToValuePtr<void>(&PresetOverrides); + + void* ReturnPropertyValuePtr = Property->ContainerPtrToValuePtr<void>(&ReturnValue); + Property->CopyCompleteValue(ReturnPropertyValuePtr, OverridePropertyValuePtr); + } + + return ReturnValue; +} diff --git a/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/MoviePipelineDeadline/Source/MoviePipelineDeadline/Private/MoviePipelineDeadlineExecutorJobCustomization.cpp 
b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/MoviePipelineDeadline/Source/MoviePipelineDeadline/Private/MoviePipelineDeadlineExecutorJobCustomization.cpp new file mode 100644 index 0000000000..988116a2a6 --- /dev/null +++ b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/MoviePipelineDeadline/Source/MoviePipelineDeadline/Private/MoviePipelineDeadlineExecutorJobCustomization.cpp @@ -0,0 +1,30 @@ +// Copyright Epic Games, Inc. All Rights Reserved. + +#include "MoviePipelineDeadlineExecutorJobCustomization.h" + +#include "DetailCategoryBuilder.h" +#include "DetailLayoutBuilder.h" + +TSharedRef FMoviePipelineDeadlineExecutorJobCustomization::MakeInstance() +{ + return MakeShared(); +} + +void FMoviePipelineDeadlineExecutorJobCustomization::CustomizeDetails(IDetailLayoutBuilder& DetailBuilder) +{ + IDetailCategoryBuilder& MrpCategory = DetailBuilder.EditCategory("Movie Render Pipeline"); + + TArray> OutMrpCategoryProperties; + MrpCategory.GetDefaultProperties(OutMrpCategoryProperties); + + // We hide these properties because we want to use "Name", "UserName" and "Comment" from the Deadline preset + const TArray PropertiesToHide = {"JobName", "Author"}; + + for (const TSharedRef& PropertyHandle : OutMrpCategoryProperties) + { + if (PropertiesToHide.Contains(PropertyHandle->GetProperty()->GetFName())) + { + PropertyHandle->MarkHiddenByCustomization(); + } + } +} diff --git a/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/MoviePipelineDeadline/Source/MoviePipelineDeadline/Private/MoviePipelineDeadlineModule.cpp b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/MoviePipelineDeadline/Source/MoviePipelineDeadline/Private/MoviePipelineDeadlineModule.cpp new file mode 100644 index 0000000000..c716554d8b --- /dev/null +++ 
b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/MoviePipelineDeadline/Source/MoviePipelineDeadline/Private/MoviePipelineDeadlineModule.cpp @@ -0,0 +1,39 @@ +// Copyright Epic Games, Inc. All Rights Reserved. + +#include "MoviePipelineDeadlineModule.h" + +#include "DeadlineJobPreset.h" +#include "DeadlineJobPresetCustomization.h" +#include "MoviePipelineDeadlineExecutorJob.h" +#include "MoviePipelineDeadlineExecutorJobCustomization.h" + +#include "Modules/ModuleManager.h" +#include "PropertyEditorModule.h" + +void FMoviePipelineDeadlineModule::StartupModule() +{ + FPropertyEditorModule& PropertyModule = FModuleManager::LoadModuleChecked<FPropertyEditorModule>("PropertyEditor"); + + PropertyModule.RegisterCustomClassLayout( + UMoviePipelineDeadlineExecutorJob::StaticClass()->GetFName(), + FOnGetDetailCustomizationInstance::CreateStatic(&FMoviePipelineDeadlineExecutorJobCustomization::MakeInstance)); + + PropertyModule.RegisterCustomPropertyTypeLayout( + FDeadlineJobPresetStruct::StaticStruct()->GetFName(), + FOnGetPropertyTypeCustomizationInstance::CreateStatic(&FDeadlineJobPresetCustomization::MakeInstance)); + + PropertyModule.NotifyCustomizationModuleChanged(); +} + +void FMoviePipelineDeadlineModule::ShutdownModule() +{ + if (FPropertyEditorModule* PropertyModule = FModuleManager::Get().GetModulePtr<FPropertyEditorModule>("PropertyEditor")) + { + PropertyModule->UnregisterCustomClassLayout(UMoviePipelineDeadlineExecutorJob::StaticClass()->GetFName()); + PropertyModule->UnregisterCustomPropertyTypeLayout(FDeadlineJobPresetStruct::StaticStruct()->GetFName()); + + PropertyModule->NotifyCustomizationModuleChanged(); + } +} + +IMPLEMENT_MODULE(FMoviePipelineDeadlineModule, MoviePipelineDeadline); diff --git a/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/MoviePipelineDeadline/Source/MoviePipelineDeadline/Private/MoviePipelineDeadlineSettings.cpp 
b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/MoviePipelineDeadline/Source/MoviePipelineDeadline/Private/MoviePipelineDeadlineSettings.cpp new file mode 100644 index 0000000000..ff3c8aa103 --- /dev/null +++ b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/MoviePipelineDeadline/Source/MoviePipelineDeadline/Private/MoviePipelineDeadlineSettings.cpp @@ -0,0 +1,26 @@ +// Copyright Epic Games, Inc. All Rights Reserved. + +#include "MoviePipelineDeadlineSettings.h" + +UMoviePipelineDeadlineSettings::UMoviePipelineDeadlineSettings() +{ + const TArray PropertiesToShowByDefault = {"Name", "Comment", "Department", "Pool", "Group", "Priority", "UserName"}; + + // Set up default properties to show in MRQ + // We do this by setting everything to hide except some defined exceptions by name + for (TFieldIterator PropIt(FDeadlineJobPresetStruct::StaticStruct()); PropIt; ++PropIt) + { + const FProperty* Property = *PropIt; + if (!Property) + { + continue; + } + + if (PropertiesToShowByDefault.Contains(Property->GetName())) + { + continue; + } + + JobPresetPropertiesToHideInMovieRenderQueue.Add(*Property->GetPathName()); + } +} \ No newline at end of file diff --git a/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/MoviePipelineDeadline/Source/MoviePipelineDeadline/Public/DeadlineJobPresetCustomization.h b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/MoviePipelineDeadline/Source/MoviePipelineDeadline/Public/DeadlineJobPresetCustomization.h new file mode 100644 index 0000000000..af1db76669 --- /dev/null +++ b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/MoviePipelineDeadline/Source/MoviePipelineDeadline/Public/DeadlineJobPresetCustomization.h @@ -0,0 +1,36 @@ +// Copyright Epic Games, Inc. All Rights Reserved. 
+ +#pragma once + +#include "IPropertyTypeCustomization.h" + +class IDetailPropertyRow; +class UMoviePipelineDeadlineExecutorJob; + +/** + * This customization lives in the MoviePipelineDeadline module because in order to get + * the preset assigned to the owning job, we need to cast the owning object to the + * UMoviePipelineDeadlineExecutorJob class. We need the assigned preset for the custom + * ResetToDefault behaviour. + */ +class FDeadlineJobPresetCustomization : public IPropertyTypeCustomization +{ +public: + + static TSharedRef< IPropertyTypeCustomization > MakeInstance(); + + /** Begin IPropertyTypeCustomization interface */ + virtual void CustomizeHeader(TSharedRef PropertyHandle, FDetailWidgetRow& HeaderRow, IPropertyTypeCustomizationUtils& CustomizationUtils) override {} + virtual void CustomizeChildren(TSharedRef StructHandle, IDetailChildrenBuilder& ChildBuilder, IPropertyTypeCustomizationUtils& CustomizationUtils) override; + /** End IPropertyTypeCustomization interface */ + + static bool IsPropertyHiddenInMovieRenderQueue(const FName& InPropertyPath); + static bool IsPropertyRowEnabledInMovieRenderJob(const FName& InPropertyPath, UMoviePipelineDeadlineExecutorJob* Job); + +protected: + void CustomizeStructChildrenInAssetDetails(IDetailPropertyRow& PropertyRow) const; + void CustomizeStructChildrenInMovieRenderQueue(IDetailPropertyRow& PropertyRow, UMoviePipelineDeadlineExecutorJob* Job) const; + + static bool IsResetToDefaultVisibleOverride(TSharedPtr PropertyHandle, UMoviePipelineDeadlineExecutorJob* Job); + static void ResetToDefaultOverride(TSharedPtr PropertyHandle, UMoviePipelineDeadlineExecutorJob* Job); +}; diff --git a/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/MoviePipelineDeadline/Source/MoviePipelineDeadline/Public/MoviePipelineDeadlineExecutorJob.h 
b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/MoviePipelineDeadline/Source/MoviePipelineDeadline/Public/MoviePipelineDeadlineExecutorJob.h new file mode 100644 index 0000000000..57c1cd9916 --- /dev/null +++ b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/MoviePipelineDeadline/Source/MoviePipelineDeadline/Public/MoviePipelineDeadlineExecutorJob.h @@ -0,0 +1,66 @@ +// Copyright Epic Games, Inc. All Rights Reserved. + +#pragma once + +#include "MoviePipelineQueue.h" +#include "DeadlineJobPreset.h" + +#include "MoviePipelineDeadlineExecutorJob.generated.h" + +USTRUCT() +struct FPropertyRowEnabledInfo +{ + GENERATED_BODY() + + FName PropertyPath; + bool bIsEnabled = false; +}; + +UCLASS(BlueprintType, config = EditorPerProjectUserSettings) +class MOVIEPIPELINEDEADLINE_API UMoviePipelineDeadlineExecutorJob : public UMoviePipelineExecutorJob +{ + GENERATED_BODY() +public: + UMoviePipelineDeadlineExecutorJob(); + + bool IsPropertyRowEnabledInMovieRenderJob(const FName& InPropertyPath) const; + + void SetPropertyRowEnabledInMovieRenderJob(const FName& InPropertyPath, bool bInEnabled); + + /** UObject interface */ + #if WITH_EDITOR + virtual void PostEditChangeProperty(FPropertyChangedEvent& PropertyChangedEvent) override; +#endif + + + /** + * Returns the Deadline job info with overrides applied, if enabled. + * Skips any property not + */ + UFUNCTION(BlueprintCallable, Category = "DeadlineService") + FDeadlineJobPresetStruct GetDeadlineJobPresetStructWithOverrides() const; + + /** `Batch Name` groups similar jobs together in the Deadline Monitor UI. */ + UPROPERTY(EditAnywhere, BlueprintReadWrite, config, Category = "Deadline") + FString BatchName; + + /* Deadline Job Preset. */ + UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "Deadline") + TObjectPtr JobPreset; + + /* Output directory override on Deadline. 
*/ + UPROPERTY(EditAnywhere, BlueprintReadWrite, config, Category = "Deadline") + FDirectoryPath OutputDirectoryOverride; + + /* Filename Format override on Deadline. */ + UPROPERTY(EditAnywhere, BlueprintReadWrite, config, Category = "Deadline") + FString FilenameFormatOverride; + + UPROPERTY(EditAnywhere, BlueprintReadWrite, config, Category = "Deadline") + FDeadlineJobPresetStruct PresetOverrides = FDeadlineJobPresetStruct(); + +protected: + + UPROPERTY(config) + TArray EnabledPropertyOverrides; +}; diff --git a/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/MoviePipelineDeadline/Source/MoviePipelineDeadline/Public/MoviePipelineDeadlineExecutorJobCustomization.h b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/MoviePipelineDeadline/Source/MoviePipelineDeadline/Public/MoviePipelineDeadlineExecutorJobCustomization.h new file mode 100644 index 0000000000..e0220d2b19 --- /dev/null +++ b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/MoviePipelineDeadline/Source/MoviePipelineDeadline/Public/MoviePipelineDeadlineExecutorJobCustomization.h @@ -0,0 +1,22 @@ +// Copyright Epic Games, Inc. All Rights Reserved. + +#pragma once + +#include "IDetailCustomization.h" + +/** + * This customization lives in the MoviePipelineDeadline module because in order to get + * the preset assigned to the owning job, we need to cast the owning object to the + * UMoviePipelineDeadlineExecutorJob class. We need the assigned preset for the custom + * ResetToDefault behaviour. 
+ */ +class FMoviePipelineDeadlineExecutorJobCustomization : public IDetailCustomization +{ +public: + + static TSharedRef< IDetailCustomization > MakeInstance(); + + /** Begin IDetailCustomization interface */ + virtual void CustomizeDetails(IDetailLayoutBuilder& DetailBuilder) override; + /** End IDetailCustomization interface */ +}; diff --git a/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/MoviePipelineDeadline/Source/MoviePipelineDeadline/Public/MoviePipelineDeadlineModule.h b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/MoviePipelineDeadline/Source/MoviePipelineDeadline/Public/MoviePipelineDeadlineModule.h new file mode 100644 index 0000000000..603730dda9 --- /dev/null +++ b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/MoviePipelineDeadline/Source/MoviePipelineDeadline/Public/MoviePipelineDeadlineModule.h @@ -0,0 +1,13 @@ +// Copyright Epic Games, Inc. All Rights Reserved. + +#pragma once + +#include "Modules/ModuleInterface.h" + +class FMoviePipelineDeadlineModule : public IModuleInterface +{ +public: + virtual void StartupModule() override; + + virtual void ShutdownModule() override; +}; diff --git a/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/MoviePipelineDeadline/Source/MoviePipelineDeadline/Public/MoviePipelineDeadlineSettings.h b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/MoviePipelineDeadline/Source/MoviePipelineDeadline/Public/MoviePipelineDeadlineSettings.h new file mode 100644 index 0000000000..0a53f58ea3 --- /dev/null +++ b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/MoviePipelineDeadline/Source/MoviePipelineDeadline/Public/MoviePipelineDeadlineSettings.h @@ -0,0 +1,57 @@ +// Copyright Epic Games, Inc. All Rights Reserved. 
+ +#pragma once + +#include "Engine/DeveloperSettings.h" + +#include "DeadlineJobPreset.h" + +#include "MoviePipelineDeadlineSettings.generated.h" + +/** +* Project-wide settings for Deadline Movie Pipeline. +*/ +UCLASS(BlueprintType, config = Editor, defaultconfig, meta = (DisplayName = "Movie Pipeline Deadline")) +class UMoviePipelineDeadlineSettings : public UDeveloperSettings +{ + GENERATED_BODY() + +public: + UMoviePipelineDeadlineSettings(); + + /** Gets the settings container name for the settings, either Project or Editor */ + virtual FName GetContainerName() const override { return FName("Project"); } + /** Gets the category for the settings, some high level grouping like, Editor, Engine, Game...etc. */ + virtual FName GetCategoryName() const override { return FName("Plugins"); } + + /** UObject interface */ + virtual void PostEditChangeProperty(struct FPropertyChangedEvent& PropertyChangedEvent) override + { + Super::PostEditChangeProperty(PropertyChangedEvent); + SaveConfig(); + } + + /** The project level Deadline preset Data Asset */ + UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "Movie Pipeline Deadline") + TObjectPtr DefaultJobPreset; + + void AddPropertyToHideInMovieRenderQueue(const FName& InPropertyPath) + { + JobPresetPropertiesToHideInMovieRenderQueue.Add(InPropertyPath); + } + + void RemovePropertyToHideInMovieRenderQueue(const FName& InPropertyPath) + { + JobPresetPropertiesToHideInMovieRenderQueue.Remove(InPropertyPath); + } + + bool GetIsPropertyHiddenInMovieRenderQueue(const FName& InPropertyPath) const + { + return JobPresetPropertiesToHideInMovieRenderQueue.Contains(InPropertyPath); + } + +protected: + + UPROPERTY(config) + TArray JobPresetPropertiesToHideInMovieRenderQueue; +}; diff --git a/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/README.md b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/README.md new file mode 100644 index 0000000000..3e7d20595b --- 
/dev/null +++ b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/README.md @@ -0,0 +1,36 @@ +# Deadline Unreal Engine Service plugin +To use this plugin copy the `UnrealDeadlineService` and the `MoviePipelineDeadline` to the `Plugins` directory located in your Unreal Project's directory. + +For further documentation on this plugin, please refer to the [Unreal Engine 5](https://docs.thinkboxsoftware.com/products/deadline/10.3/1_User%20Manual/manual/app-index.html#u) documentation available on our doc website. +> **_Note:_** +> This plugin's web service mode has a dependency on `urllib3` that is not packaged with this +> plugin. To resolve this, execute the `requirements.txt` file in the +> `unreal/UnrealDeadlineService/Content/Python/Lib` directory and save the `urllib3` +> site packages in the `Win64` directory of the above path. +> The engine will automatically add this library to the Python path and make it +> available to the Python interpreter. + +# Unreal Movie Pipeline Deadline plugin + +Although usage documentation for this plugin is a work in progress, +it does not limit the use of other Deadline service features. +This plugin serves as an example of how to use the aforementioned Deadline services. + +> **_Note:_** +> Currently, it is recommended to build the Engine from source as the current +> state of the plugins do not have compiled versions for the released Editor binaries. +> Building the Engine from source allows you to install the necessary dependencies +> for compiling the Engine plugins locally. This issue will be remedied in future releases. Follow +> the instructions on [Downloading Unreal Engine Source Code](https://docs.unrealengine.com/5.1/en-US/downloading-unreal-engine-source-code/) +> to download the Engine versions from source and build the Engine locally. 
+ +# Local Testing + +To test the functionality of the plugins, use the [Meerkat Demo](https://www.unrealengine.com/marketplace/en-US/product/meerkat-demo-02) +from the marketplace. This project is a self-contained cinematic project that +allows you to test movie rendering with the latest version of the Engine binaries. +This is the project we use for internal testing of the plugins. + +> **_Note:_** +> When you enable the plugins for this project, the Engine may need to +> recompile the custom Editor for this project. diff --git a/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/Config/DefaultUnrealDeadlineService.ini b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/Config/DefaultUnrealDeadlineService.ini new file mode 100644 index 0000000000..0bfa6a8bb4 --- /dev/null +++ b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/Config/DefaultUnrealDeadlineService.ini @@ -0,0 +1,24 @@ +[CoreRedirects] ++ClassRedirects=(OldName="/Script/DeadlineService.DeadlineJobInfoStruct",NewName="/Script/DeadlineService.DeadlineJobInfo") ++ClassRedirects=(OldName="/Script/DeadlineService.DeadlineJobPresetAsset",NewName="/Script/DeadlineService.DeadlineJobPreset") ++PropertyRedirects=(OldName="/Script/DeadlineService.DeadlineJobPresetLibrary.DeadlineJobPresets",NewName="/Script/DeadlineService.DeadlineJobPreset.JobPresetStruct") ++PropertyRedirects=(OldName="/Script/DeadlineService.DeadlineJobInfoStruct.OverrideTaskExtraInfoNames",NewName="/Script/DeadlineService.DeadlineJobPresetStruct.bOverrideTaskExtraInfoNames") ++PropertyRedirects=(OldName="/Script/DeadlineService.DeadlineJobInfoStruct.TaskExtraInfo",NewName="/Script/DeadlineService.DeadlineJobPresetStruct.TaskExtraInfoNames") 
++PropertyRedirects=(OldName="/Script/DeadlineService.DeadlineJobInfoStruct.TaskExtraInfoName",NewName="/Script/DeadlineService.DeadlineJobPresetStruct.TaskExtraInfoNames") ++PropertyRedirects=(OldName="/Script/DeadlineService.DeadlineJobInfoStruct.FramesList",NewName="/Script/DeadlineService.DeadlineJobPresetStruct.Frames") ++PropertyRedirects=(OldName="/Script/DeadlineService.DeadlineJobInfoStruct.PluginInfo",NewName="/Script/DeadlineService.DeadlineJobPresetStruct.PluginInfo") ++PropertyRedirects=(OldName="/Script/DeadlineService.DeadlineJobInfoStruct.SecondaryPool",NewName="/Script/DeadlineService.DeadlineJobPresetStruct.SecondPool") ++PropertyRedirects=(OldName="/Script/DeadlineService.DeadlineJobInfoStruct.JobName",NewName="/Script/DeadlineService.DeadlineJobPresetStruct.Name") ++PropertyRedirects=(OldName="/Script/DeadlineService.DeadlineJobInfoStruct.FrameList",NewName="/Script/DeadlineService.DeadlineJobPresetStruct.Frames") ++PropertyRedirects=(OldName="/Script/DeadlineService.DeadlineJobInfoStruct.bMachineListIsABlacklist",NewName="/Script/DeadlineService.DeadlineJobPresetStruct.bMachineListIsDenylist") ++PropertyRedirects=(OldName="/Script/DeadlineService.DeadlineJobInfoStruct.bMachineListIsAdenylist",NewName="/Script/DeadlineService.DeadlineJobPresetStruct.bMachineListIsDenylist") ++PropertyRedirects=(OldName="/Script/DeadlineService.DeadlineJobInfoStruct.TaskTimeout",NewName="/Script/DeadlineService.DeadlineJobPresetStruct.TaskTimeoutSeconds") ++PropertyRedirects=(OldName="/Script/DeadlineService.DeadlineJobInfoStruct.bEnableAutoTaskTimeout",NewName="/Script/DeadlineService.DeadlineJobPresetStruct.bEnableAutoTimeout") ++PropertyRedirects=(OldName="/Script/DeadlineService.DeadlineJobInfoStruct.Limits",NewName="/Script/DeadlineService.DeadlineJobPresetStruct.LimitGroups") ++PropertyRedirects=(OldName="/Script/DeadlineService.DeadlineJobInfoStruct.Dependencies",NewName="/Script/DeadlineService.DeadlineJobPresetStruct.JobDependencies") 
++PropertyRedirects=(OldName="/Script/DeadlineService.DeadlineJobInfoStruct.FramesPerTask",NewName="/Script/DeadlineService.DeadlineJobPresetStruct.ChunkSize") ++PropertyRedirects=(OldName="/Script/DeadlineService.DeadlineJobPreset.JobInfo",NewName="/Script/DeadlineService.DeadlineJobPreset.JobPresetStruct") ++PropertyRedirects=(OldName="/Script/DeadlineService.DeadlineJobPresetStruct.PluginInfoPreset",NewName="/Script/DeadlineService.DeadlineJobPresetStruct.PluginInfo") ++ClassRedirects=(OldName="/Script/DeadlineService.DeadlineJobPresetLibrary",NewName="/Script/DeadlineService.DeadlineJobPreset") ++StructRedirects=(OldName="/Script/DeadlineService.DeadlineJobInfoStruct",NewName="/Script/DeadlineService.DeadlineJobPresetStruct") ++FunctionRedirects=(OldName="/Script/DeadlineService.DeadlineServiceEditorHelpers.GetDeadlineJobInfoAsStringMap",NewName="/Script/DeadlineService.DeadlineServiceEditorHelpers.GetDeadlineJobInfo") diff --git a/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/Config/FilterPlugin.ini b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/Config/FilterPlugin.ini new file mode 100644 index 0000000000..ccebca2f32 --- /dev/null +++ b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/Config/FilterPlugin.ini @@ -0,0 +1,8 @@ +[FilterPlugin] +; This section lists additional files which will be packaged along with your plugin. Paths should be listed relative to the root plugin directory, and +; may include "...", "*", and "?" wildcards to match directories, files, and individual characters respectively. +; +; Examples: +; /README.txt +; /Extras/... 
+; /Binaries/ThirdParty/*.dll diff --git a/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/Content/Python/Lib/requirements.txt b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/Content/Python/Lib/requirements.txt new file mode 100644 index 0000000000..a42590bebe --- /dev/null +++ b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/Content/Python/Lib/requirements.txt @@ -0,0 +1 @@ +urllib3 diff --git a/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/Content/Python/deadline_command.py b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/Content/Python/deadline_command.py new file mode 100644 index 0000000000..7610f81154 --- /dev/null +++ b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/Content/Python/deadline_command.py @@ -0,0 +1,140 @@ +# Copyright Epic Games, Inc. All Rights Reserved + +# Built-In +import os +import subprocess +import logging +import tempfile + +# Best-effort import for type annotations +try: + from typing import Any, List, Optional, Tuple, Union +except ImportError: + pass + +logger = logging.getLogger("DeadlineCommand") + +class DeadlineCommand: + """ + Class to manage use of DeadlineCommand + """ + def __init__(self): + self.deadlineCommand = self._get_DeadlineCommand() + + def _get_DeadlineCommand(self): + # type: () -> str + deadlineBin = "" # type: str + try: + deadlineBin = os.environ['DEADLINE_PATH'] + except KeyError: + #if the error is a key error it means that DEADLINE_PATH is not set. however Deadline command may be in the PATH or on OSX it could be in the file /Users/Shared/Thinkbox/DEADLINE_PATH + pass + + # On OSX, we look for the DEADLINE_PATH file if the environment variable does not exist. 
+ if deadlineBin == "" and os.path.exists( "/Users/Shared/Thinkbox/DEADLINE_PATH" ): + with open( "/Users/Shared/Thinkbox/DEADLINE_PATH" ) as f: + deadlineBin = f.read().strip() + + deadlineCommand = os.path.join(deadlineBin, "deadlinecommand") # type: str + + return deadlineCommand + + def get_repository_path(self, subdir = None): + + startupinfo = None + + args = [self.deadlineCommand, "-GetRepositoryPath "] + if subdir != None and subdir != "": + args.append(subdir) + + # Specifying PIPE for all handles to workaround a Python bug on Windows. The unused handles are then closed immediatley afterwards. + logger.debug(f"Getting repository path via deadlinecommand with subprocess args: {args}") + proc = subprocess.Popen(args, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, startupinfo=startupinfo) + + proc.stdin.close() + proc.stderr.close() + + output = proc.stdout.read() + + path = output.decode("utf_8") + path = path.replace("\r","").replace("\n","").replace("\\","/") + + return path + + def get_pools(self): + startupinfo = None + + args = [self.deadlineCommand, "-GetPoolNames"] + + # Specifying PIPE for all handles to workaround a Python bug on Windows. The unused handles are then closed immediatley afterwards. + logger.debug(f"Getting pools via deadlinecommand with subprocess args: {args}") + proc = subprocess.Popen(args, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, startupinfo=startupinfo) + + proc.stdin.close() + proc.stderr.close() + + output = proc.stdout.read() + + path = output.decode("utf_8") + + return path.split(os.linesep) + + def get_groups(self): + startupinfo = None + + args = [self.deadlineCommand, "-GetGroupNames"] + + # Specifying PIPE for all handles to workaround a Python bug on Windows. The unused handles are then closed immediatley afterwards. 
+ logger.debug(f"Getting groupsvia deadlinecommand with subprocess args: {args}") + proc = subprocess.Popen(args, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, startupinfo=startupinfo) + + proc.stdin.close() + proc.stderr.close() + + output = proc.stdout.read() + + path = output.decode("utf_8") + + return path.split(os.linesep) + + def submit_job(self, job_data): + startupinfo = None + + # cast dict to list of strings equivilent to job file and plugin file + job_info = [k+'='+v.replace("\n","").replace("\r","").replace("\t","")+'\n' for k, v in job_data["JobInfo"].items()] + plugin_info = [k+'='+v.replace("\n","").replace("\r","").replace("\t","")+'\n' for k, v in job_data["PluginInfo"].items()] + + with tempfile.NamedTemporaryFile(mode = "w", delete=False) as f_job, tempfile.NamedTemporaryFile(mode = "w", delete=False) as f_plugin: + logger.debug(f"Creating temporary job file {f_job.name}") + logger.debug(f"Creating temporary plugin file {f_plugin.name}") + f_job.writelines(job_info) + f_plugin.writelines(plugin_info) + + f_job.close() + f_plugin.close() + + args = [self.deadlineCommand, "-SubmitJob", f_job.name, f_plugin.name] + args.extend(job_data["aux_files"]) if "aux_files" in job_data else None # If aux files present extend args + # Specifying PIPE for all handles to workaround a Python bug on Windows. The unused handles are then closed immediatley afterwards. + logger.debug(f"Submitting job via deadlinecommand with subprocess args: {args}") + proc = subprocess.Popen(args, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, startupinfo=startupinfo) + + # On windows machines Temproary files cannot be opened by multiple processes so we cann use the delete=True flag and must clean up the tmp files ourselves. 
+ # https://docs.python.org/3/library/tempfile.html#tempfile.NamedTemporaryFile + proc.wait() + os.remove(f_job.name) + os.remove(f_plugin.name) + logger.debug(f"Removed temporary job file {f_job.name}") + logger.debug(f"Removed temporary plugin file {f_plugin.name}") + + + proc.stdin.close() + proc.stderr.close() + + output = proc.stdout.read() + job_ids = [] + for line in output.decode("utf_8").split(os.linesep): + if line.startswith("JobID"): + job_ids.append(line.split("=")[1].strip()) + + return min(job_ids) diff --git a/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/Content/Python/deadline_enums.py b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/Content/Python/deadline_enums.py new file mode 100644 index 0000000000..54e08967d6 --- /dev/null +++ b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/Content/Python/deadline_enums.py @@ -0,0 +1,55 @@ +# Built-in +from enum import Enum, auto + + +class AutoRequestName(Enum): + """ + Function to auto generate the enum value from its name. 
+ Reference: https://docs.python.org/3/library/enum.html#using-automatic-values + """ + def _generate_next_value_(name, start, count, last_values): + return name + + +class HttpRequestType(AutoRequestName): + """ + Enum class for HTTP request types + """ + GET = auto() + PUT = auto() + POST = auto() + DELETE = auto() + + +class DeadlineJobState(Enum): + """Enum class for deadline states""" + + SUSPEND = "suspend" + RESUME = "resume" + REQUEUE = "requeue" + PEND = "pend" + ARCHIVE = "archive" + RESUME_FAILED = "resumefailed" + SUSPEND_NON_RENDERING = "suspendnonrendering" + RELEASE_PENDING = "releasepending" + COMPLETE = "complete" + FAIL = "fail" + UPDATE_SUBMISSION_DATE = "updatesubmissiondate" + UNDELETE = "undelete" + + +class DeadlineJobStatus(Enum): + """ + Enum class for deadline job status + Reference: https://docs.thinkboxsoftware.com/products/deadline/10.1/1_User%20Manual/manual/rest-jobs.html#job-property-values + """ + + UNKNOWN = "Unknown" + ACTIVE = "Active" + SUSPENDED = "Suspended" + COMPLETED = "Completed" + FAILED = "Failed" + RENDERING = "Rendering" + PENDING = "Pending" + IDLE = "Idle" + QUEUED = "Queued" diff --git a/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/Content/Python/deadline_http.py b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/Content/Python/deadline_http.py new file mode 100644 index 0000000000..0ada41a79f --- /dev/null +++ b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/Content/Python/deadline_http.py @@ -0,0 +1,118 @@ +# Copyright Epic Games, Inc. 
All Rights Reserved + +# Built-In +import logging +import json +logger = logging.getLogger("DeadlineHTTP") +try: + # Third-party + from urllib.parse import urljoin + from urllib3 import PoolManager + from urllib3.exceptions import HTTPError +except ImportError: + logger.info("module 'urllib3' not found") +# Internal +from deadline_enums import HttpRequestType + + + + +class DeadlineHttp: + """ + Class to send requests to deadline server + """ + + # ------------------------------------------------------------------------------------------------------------------ + # Magic Methods + + def __init__(self, host): + """ + Constructor + :param str host: Deadline server host + """ + self.host = host + + + # ------------------------------------------------------------------------------------------------------------------ + # Public Methods + + def send_http_request(self, request_type, api_url, payload=None, fields=None, headers=None, retries=0): + """ + This method is used to upload or receive data from the Deadline server. + :param HttpRequestType request_type: HTTP request verb. i.e GET/POST/PUT/DELETE + :param str api_url: URL relative path queries. Example: /jobs , /pools, /jobs?JobID=0000 + :param payload: Data object to POST/PUT to Deadline server + :param dict fields: Request fields. This is typically used in files and binary uploads + :param dict headers: Header data for request + :param int retries: The number of retries to attempt before failing request. Defaults to 0. + :return: JSON object response from the server. 
+ """ + self._http_manager = PoolManager(cert_reqs='CERT_NONE') # Disable SSL certificate check + # Validate request type + if not isinstance(request_type, HttpRequestType): + raise ValueError(f"Request type must be of type {type(HttpRequestType)}") + + response = self._http_manager.request( + request_type.value, + urljoin(self.host, api_url), + body=payload, + fields=fields, + headers=headers, + retries=retries + ) + + return response.data + + def get_job_details(self, job_id): + """ + This method gets the job details for the deadline job + :param str job_id: Deadline JobID + :return: Job details object returned from the server. Usually a Json object + """ + + if not job_id: + raise ValueError(f"A JobID is required to get job details from Deadline. Got {job_id}.") + + api_query_string = f"api/jobs?JobID={job_id}&Details=true" + + job_details = self.send_http_request( + HttpRequestType.GET, + api_query_string + ) + + try: + job_details = json.loads(job_details.decode('utf-8'))[job_id] + + # If an error occurs trying to decode the json data, most likely an error occurred server side thereby + # returning a string instead of the data requested. 
+ # Raise the decoded error + except Exception as err: + raise RuntimeError( + f"An error occurred getting the server data for {job_id}: \n{job_details.decode('utf-8')}" + ) + else: + return job_details + + def send_job_command(self, job_id, command): + """ + Send a command to the Deadline server for the job + :param str job_id: Deadline JobID + :param dict command: Command to send to the deadline server + :return: Returns the response from the server + """ + api_string = urljoin(self.host, "/api/jobs") + + if not job_id: + raise RuntimeError("There is no deadline job ID to send this command for.") + + # Add the job id to the command dictionary + command.update(JobID=job_id) + + response = self.send_http_request( + HttpRequestType.PUT, + api_string, + payload=json.dumps(command).encode('utf-8'), + headers={'Content-Type': 'application/json'} + ) + + return response.decode('utf-8') diff --git a/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/Content/Python/deadline_job.py b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/Content/Python/deadline_job.py new file mode 100644 index 0000000000..630f190a06 --- /dev/null +++ b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/Content/Python/deadline_job.py @@ -0,0 +1,250 @@ +# Copyright Epic Games, Inc. 
All Rights Reserved + +""" +Deadline Job object used to submit jobs to the render farm +""" + +# Built-In +import logging + +# Third-party +import unreal + +# Internal +from deadline_utils import merge_dictionaries, get_deadline_info_from_preset + +from deadline_enums import DeadlineJobStatus + +logger = logging.getLogger("DeadlineJob") + + +class DeadlineJob: + """ Unreal Deadline Job object """ + + # ------------------------------------------------------------------------------------------------------------------ + # Magic Methods + + def __init__(self, job_info=None, plugin_info=None, job_preset: unreal.DeadlineJobPreset=None): + """ Constructor """ + self._job_id = None + self._job_info = {} + self._plugin_info = {} + self._aux_files = [] + self._job_status: DeadlineJobStatus = DeadlineJobStatus.UNKNOWN + self._job_progress = 0.0 + + # Jobs details updated by server after submission + self._job_details = None + + # Update the job, plugin and aux file info from the data asset + if job_info and plugin_info: + self.job_info = job_info + self.plugin_info = plugin_info + + if job_preset: + self.job_info, self.plugin_info = get_deadline_info_from_preset(job_preset=job_preset) + + def __repr__(self): + return f"{self.__class__.__name__}({self.job_name}, {self.job_id})" + + # ------------------------------------------------------------------------------------------------------------------ + # Public Properties + + @property + def job_info(self): + """ + Returns the Deadline job info + :return: Deadline job Info as a dictionary + :rtype: dict + """ + return self._job_info + + @job_info.setter + def job_info(self, value: dict): + """ + Sets the Deadline Job Info + :param value: Value to set on the job info. 
+ """ + if not isinstance(value, dict): + raise TypeError(f"Expected `dict` found {type(value)}") + + self._job_info = merge_dictionaries(self.job_info, value) + + if "AuxFiles" in self._job_info: + # Set the auxiliary files for this instance + self._aux_files = self._job_info.get("AuxFiles", []) + + # Remove the aux files array from the dictionary, doesn't belong there + self._job_info.pop("AuxFiles") + + @property + def plugin_info(self): + """ + Returns the Deadline plugin info + :return: Deadline plugin Info as a dictionary + :rtype: dict + """ + return self._plugin_info + + @plugin_info.setter + def plugin_info(self, value: dict): + """ + Sets the Deadline Plugin Info + :param value: Value to set on plugin info. + """ + if not isinstance(value, dict): + raise TypeError(f"Expected `dict` found {type(value)}") + + self._plugin_info = merge_dictionaries(self.plugin_info, value) + + @property + def job_id(self): + """ + Return the deadline job ID. This is the ID returned by the service after the job has been submitted + """ + return self._job_id + + @property + def job_name(self): + """ + Return the deadline job name. + """ + return self.job_info.get("Name", "Unnamed Job") + + @job_name.setter + def job_name(self, value): + """ + Updates the job name on the instance. 
This also updates the job name in the job info dictionary + :param str value: job name + """ + self.job_info.update({"Name": value}) + + @property + def aux_files(self): + """ + Returns the Auxiliary files for this job + :return: List of Auxiliary files + """ + return self._aux_files + + @property + def job_status(self): + """ + Return the current job status + :return: Deadline status + """ + + if not self.job_details: + return DeadlineJobStatus.UNKNOWN + + if "Job" not in self.job_details and "Status" not in self.job_details["Job"]: + return DeadlineJobStatus.UNKNOWN + + # Some Job statuses are represented as "Rendering (1)" to indicate the + # current status of the job and the number of tasks performing the + # current status. We only care about the job status so strip out the + # extra information. Task details are returned to the job details + # object which can be queried in a different implementation + return self.get_job_status_enum(self.job_details["Job"]["Status"].split()[0]) + + @job_status.setter + def job_status(self, value): + """ + Return the current job status + :param DeadlineJobStatus value: Job status to set on the object. + :return: Deadline status + """ + + # Statuses are expected to live in the job details object. Usually this + # property is only explicitly set if the status of a job is unknown. + # for example if the service detects a queried job is non-existent on + # the farm + + # NOTE: If the structure of how job status are represented in the job + # details changes, this implementation will need to be updated. + # Currently job statuses are represented in the jobs details as + # {"Job": {"Status": "Unknown"}} + + # "value" is expected to be an Enum so get the name of the Enum and set + # it on the job details. When the status property is called, + # this will be re-translated back into an enum. The reason for this is, + # the native job details object returned from the service has no + # concept of the job status enum. 
This is an internal
+        # representation which allows for more robust comparison operator logic
+        if self.job_details and isinstance(self.job_details, dict):
+            self.job_details.update({"Job": {"Status": value.name}})
+
+    @property
+    def job_progress(self):
+        """
+        Returns the current job progress
+        :return: Deadline job progress as a float value
+        """
+
+        if not self.job_details:
+            return 0.0
+
+        if "Job" in self.job_details and "Progress" in self.job_details["Job"]:
+            progress_str = self._job_details["Job"]["Progress"]
+            progress_str = progress_str.split()[0]
+
+            return float(progress_str) / 100  # 0-1 progress
+
+        # FIX: previously fell through and implicitly returned None when the
+        # details lacked "Job"/"Progress"; return 0.0 to honour the documented
+        # float contract (consistent with the empty-details fallback above).
+        return 0.0
+
+    @property
+    def job_details(self):
+        """
+        Returns the job details from the deadline service.
+        :return: Deadline Job details
+        """
+        return self._job_details
+
+    @job_details.setter
+    def job_details(self, value):
+        """
+        Sets the job details from the deadline service. This is typically set
+        by the service, but can be used as a general container for job
+        information.
+        """
+        self._job_details = value
+
+    # ------------------------------------------------------------------------------------------------------------------
+    # Public Methods
+
+    def get_submission_data(self):
+        """
+        Returns the submission data used by the Deadline service to submit a job
+        :return: Dictionary with job, plugin, auxiliary info
+        :rtype: dict
+        """
+        return {
+            "JobInfo": self.job_info,
+            "PluginInfo": self.plugin_info,
+            "AuxFiles": self.aux_files
+        }
+
+    # ------------------------------------------------------------------------------------------------------------------
+    # Protected Methods
+
+    @staticmethod
+    def get_job_status_enum(job_status):
+        """
+        This method returns an enum representing the job status from the server
+        :param job_status: Deadline job status
+        :return: Returns the job_status as an enum
+        :rtype: DeadlineJobStatus
+        """
+        # Convert this job status returned by the server into the job status
+        # enum representation
+
+        # Check if the job status name has
an enum representation, if not check + # the value of the job_status. + # Reference: https://docs.thinkboxsoftware.com/products/deadline/10.1/1_User%20Manual/manual/rest-jobs.html#job-property-values + try: + status = DeadlineJobStatus(job_status) + except ValueError: + try: + status = getattr(DeadlineJobStatus, job_status) + except Exception as exp: + raise RuntimeError(f"An error occurred getting the Enum status type of {job_status}. Error: \n\t{exp}") + + return status diff --git a/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/Content/Python/deadline_menus/__init__.py b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/Content/Python/deadline_menus/__init__.py new file mode 100644 index 0000000000..bd742b5e25 --- /dev/null +++ b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/Content/Python/deadline_menus/__init__.py @@ -0,0 +1,8 @@ +# Copyright Epic Games, Inc. All Rights Reserved + +from .deadline_toolbar_menu import DeadlineToolBarMenu + + +__all__ = [ + "DeadlineToolBarMenu" +] diff --git a/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/Content/Python/deadline_menus/base_menu_action.py b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/Content/Python/deadline_menus/base_menu_action.py new file mode 100644 index 0000000000..a54a182776 --- /dev/null +++ b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/Content/Python/deadline_menus/base_menu_action.py @@ -0,0 +1,58 @@ +# Copyright Epic Games, Inc. 
All Rights Reserved + +# Third-party +import unreal + + +@unreal.uclass() +class BaseActionMenuEntry(unreal.ToolMenuEntryScript): + """ + This is a custom Unreal Class that adds executable python menus to the + Editor + """ + + def __init__(self, callable_method, parent=None): + """ + Constructor + :param callable_method: Callable method to execute + """ + super(BaseActionMenuEntry, self).__init__() + + self._callable = callable_method + self.parent = parent + + @unreal.ufunction(override=True) + def execute(self, context): + """ + Executes the callable method + :param context: + :return: + """ + self._callable() + + @unreal.ufunction(override=True) + def can_execute(self, context): + """ + Determines if a menu can be executed + :param context: + :return: + """ + return True + + @unreal.ufunction(override=True) + def get_tool_tip(self, context): + """ + Returns the tool tip for the menu + :param context: + :return: + """ + return self.data.tool_tip + + @unreal.ufunction(override=True) + def get_label(self, context): + """ + Returns the label of the menu + :param context: + :return: + """ + return self.data.name diff --git a/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/Content/Python/deadline_menus/deadline_toolbar_menu.py b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/Content/Python/deadline_menus/deadline_toolbar_menu.py new file mode 100644 index 0000000000..9163a1a0cf --- /dev/null +++ b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/Content/Python/deadline_menus/deadline_toolbar_menu.py @@ -0,0 +1,131 @@ +# Copyright Epic Games, Inc. 
All Rights Reserved + +# Third party +import unreal + +# Internal +from .base_menu_action import BaseActionMenuEntry + + +class DeadlineToolBarMenu(object): + """ + Class for Deadline Unreal Toolbar menu + """ + + TOOLBAR_NAME = "Deadline" + TOOLBAR_OWNER = "deadline.toolbar.menu" + PARENT_MENU = "LevelEditor.MainMenu" + SECTION_NAME = "deadline_section" + + def __init__(self): + """Constructor""" + + # Keep reference to tool menus from Unreal + self._tool_menus = None + + # Keep track of all the action menus that have been registered to + # Unreal. Without keeping these around, the Unreal GC will remove the + # menu objects and break the in-engine menu + self.menu_entries = [] + + self._top_level_menu = f"{self.PARENT_MENU}.{self.TOOLBAR_NAME}" + + self._initialize_toolbar() + + # Set up a shutdown callback for when python is existing to cleanly + # clear the menus + unreal.register_python_shutdown_callback(self._shutdown) + + @property + def _unreal_tools_menu(self): + """Get Unreal Editor Tool menu""" + if not self._tool_menus or self._tool_menus is None: + self._tool_menus = unreal.ToolMenus.get() + + return self._tool_menus + + def _initialize_toolbar(self): + """Initialize our custom toolbar with the Editor""" + + tools_menu = self._unreal_tools_menu + + # Create the custom menu and add it to Unreal Main Menu + main_menu = tools_menu.extend_menu(self.PARENT_MENU) + + # Create the submenu object + main_menu.add_sub_menu( + self.TOOLBAR_OWNER, + "", + self.TOOLBAR_NAME, + self.TOOLBAR_NAME + ) + + # Register the custom deadline menu to the Editor Main Menu + tools_menu.register_menu( + self._top_level_menu, + "", + unreal.MultiBoxType.MENU, + False + ) + + def _shutdown(self): + """Method to call when the editor is shutting down""" + + # Unregister all menus owned by the integration + self._tool_menus.unregister_owner_by_name(self.TOOLBAR_OWNER) + + # Clean up all the menu instances we are tracking + del self.menu_entries[:] + + def register_submenu( + self, + 
menu_name, + callable_method, + label_name=None, + description=None + ): + """ + Register a menu to the toolbar. + Note: This currently creates a flat submenu in the Main Menu + + :param str menu_name: The name of the submenu + :param object callable_method: A callable method to execute on menu + activation + :param str label_name: Nice Label name to display the menu + :param str description: Description of the menu. This will eb + displayed in the tooltip + """ + + # Get an instance of a custom `unreal.ToolMenuEntryScript` class + # Wrap it in a try except block for instances where + # the unreal module has not loaded yet. + + try: + entry = BaseActionMenuEntry( + callable_method, + parent=self + ) + menu_entry_name = menu_name.replace(" ", "") + + entry.init_entry( + self.TOOLBAR_OWNER, + f"{self._top_level_menu}.{menu_entry_name}", + menu_entry_name, + label_name or menu_name, + tool_tip=description or "" + ) + + # Add the entry to our tracked list + self.menu_entries.append(entry) + + # Get the registered top level menu + menu = self._tool_menus.find_menu(self._top_level_menu) + + # Add the entry object to the menu + menu.add_menu_entry_object(entry) + + except Exception as err: + raise RuntimeError( + "Its possible unreal hasn't loaded yet. Here's the " + "error that occurred: {err}".format(err=err) + ) diff --git a/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/Content/Python/deadline_rpc/__init__.py b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/Content/Python/deadline_rpc/__init__.py new file mode 100644 index 0000000000..6e68ce8285 --- /dev/null +++ b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/Content/Python/deadline_rpc/__init__.py @@ -0,0 +1,8 @@ +from . 
import client, factory +from .base_ue_rpc import BaseRPC + +__all__ = [ + "client", + "factory", + "BaseRPC" +] diff --git a/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/Content/Python/deadline_rpc/base_server.py b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/Content/Python/deadline_rpc/base_server.py new file mode 100644 index 0000000000..c48e996e47 --- /dev/null +++ b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/Content/Python/deadline_rpc/base_server.py @@ -0,0 +1,272 @@ +import os +import sys +import abc +import queue +import time +import logging +import threading +from xmlrpc.server import SimpleXMLRPCServer + +# importlib machinery needs to be available for importing client modules +from importlib.machinery import SourceFileLoader + +logger = logging.getLogger(__name__) + +EXECUTION_QUEUE = queue.Queue() +RETURN_VALUE_NAME = 'RPC_SERVER_RETURN_VALUE' +ERROR_VALUE_NAME = 'RPC_SERVER_ERROR_VALUE' + + +def run_in_main_thread(callable_instance, *args): + """ + Runs the provided callable instance in the main thread by added it to a que + that is processed by a recurring event in an integration like a timer. + + :param call callable_instance: A callable. + :return: The return value of any call from the client. + """ + timeout = int(os.environ.get('RPC_TIME_OUT', 20)) + + globals().pop(RETURN_VALUE_NAME, None) + globals().pop(ERROR_VALUE_NAME, None) + EXECUTION_QUEUE.put((callable_instance, args)) + + for attempt in range(timeout * 10): + if RETURN_VALUE_NAME in globals(): + return globals().get(RETURN_VALUE_NAME) + elif ERROR_VALUE_NAME in globals(): + raise globals()[ERROR_VALUE_NAME] + else: + time.sleep(0.1) + + if RETURN_VALUE_NAME not in globals(): + raise TimeoutError( + f'The call "{callable_instance.__name__}" timed out because it hit the timeout limit' + f' of {timeout} seconds.' 
+ ) + + +def execute_queued_calls(*extra_args): + """ + Runs calls in the execution que till they are gone. Designed to be passed to a + recurring event in an integration like a timer. + """ + while not EXECUTION_QUEUE.empty(): + if RETURN_VALUE_NAME not in globals(): + callable_instance, args = EXECUTION_QUEUE.get() + try: + globals()[RETURN_VALUE_NAME] = callable_instance(*args) + except Exception as error: + # store the error in the globals and re-raise it + globals()[ERROR_VALUE_NAME] = error + raise error + + +class BaseServer(SimpleXMLRPCServer): + def serve_until_killed(self): + """ + Serves till killed by the client. + """ + self.quit = False + while not self.quit: + self.handle_request() + + +class BaseRPCServer: + def __init__(self, name, port, is_thread=False): + """ + Initialize the base server. + + :param str name: The name of the server. + :param int port: The number of the server port. + :param bool is_thread: Whether or not the server is encapsulated in a thread. + """ + self.server = BaseServer( + (os.environ.get('RPC_HOST', '127.0.0.1'), port), + logRequests=False, + allow_none=True + ) + self.is_thread = is_thread + self.server.register_function(self.add_new_callable) + self.server.register_function(self.kill) + self.server.register_function(self.is_running) + self.server.register_function(self.set_env) + self.server.register_introspection_functions() + self.server.register_multicall_functions() + logger.info(f'Started RPC server "{name}".') + + @staticmethod + def is_running(): + """ + Responds if the server is running. + """ + return True + + @staticmethod + def set_env(name, value): + """ + Sets an environment variable in the server's python environment. + + :param str name: The name of the variable. + :param str value: The value. + """ + os.environ[name] = str(value) + + def kill(self): + """ + Kill the running server from the client. Only if running in blocking mode. 
+ """ + self.server.quit = True + return True + + def add_new_callable(self, callable_name, code, client_system_path, remap_pairs=None): + """ + Adds a new callable defined in the client to the server. + + :param str callable_name: The name of the function that will added to the server. + :param str code: The code of the callable that will be added to the server. + :param list[str] client_system_path: The list of python system paths from the client. + :param list(tuple) remap_pairs: A list of tuples with first value being the client python path root and the + second being the new server path root. This can be useful if the client and server are on two different file + systems and the root of the import paths need to be dynamically replaced. + :return str: A response message back to the client. + """ + for path in client_system_path: + # if a list of remap pairs are provided, they will be remapped before being added to the system path + for client_path_root, matching_server_path_root in remap_pairs or []: + if path.startswith(client_path_root): + path = os.path.join( + matching_server_path_root, + path.replace(client_path_root, '').replace(os.sep, '/').strip('/') + ) + + if path not in sys.path: + sys.path.append(path) + + # run the function code + exec(code) + callable_instance = locals().copy().get(callable_name) + + # grab it from the locals and register it with the server + if callable_instance: + if self.is_thread: + self.server.register_function( + self.thread_safe_call(callable_instance), + callable_name + ) + else: + self.server.register_function( + callable_instance, + callable_name + ) + return f'The function "{callable_name}" has been successfully registered with the server!' + + +class BaseRPCServerThread(threading.Thread, BaseRPCServer): + def __init__(self, name, port): + """ + Initialize the base rpc server. + + :param str name: The name of the server. + :param int port: The number of the server port. 
+ """ + threading.Thread.__init__(self, name=name, daemon=True) + BaseRPCServer.__init__(self, name, port, is_thread=True) + + def run(self): + """ + Overrides the run method. + """ + self.server.serve_forever() + + @abc.abstractmethod + def thread_safe_call(self, callable_instance, *args): + """ + Implements thread safe execution of a call. + """ + return + + +class BaseRPCServerManager: + @abc.abstractmethod + def __init__(self): + """ + Initialize the server manager. + Note: when this class is subclassed `name`, `port`, `threaded_server_class` need to be defined. + """ + self.server_thread = None + self.server_blocking = None + self._server = None + + def start_server_thread(self): + """ + Starts the server in a thread. + """ + self.server_thread = self.threaded_server_class(self.name, self.port) + self._server = self.server_thread.server + self.server_thread.start() + + def start_server_blocking(self): + """ + Starts the server in the main thread, which blocks all other processes. This can only + be killed by the client. + """ + self.server_blocking = BaseRPCServer(self.name, self.port) + self._server = self.server_blocking.server + self._server.serve_until_killed() + + def start(self, threaded=True): + """ + Starts the server. + + :param bool threaded: Whether or not to start the server in a thread. If not threaded + it will block all other processes. + """ + # start the server in a thread + if threaded and not self.server_thread: + self.start_server_thread() + + # start the blocking server + elif not threaded and not self.server_blocking: + self.start_server_blocking() + + else: + logger.info(f'RPC server "{self.name}" is already running...') + + def is_running(self): + """ + Checks to see if a blocking or threaded RPC server is still running + """ + if self._server: + try: + return self._server.is_running() + except (AttributeError, RuntimeError, Exception): + return False + + return False + + def get_server(self): + """ + Returns the rpc server running. 
This is useful when executing in a + thread and not blocking + """ + if not self._server: + raise RuntimeError("There is no server configured for this Manager") + + return self._server + + def shutdown(self): + """ + Shuts down the server. + """ + if self.server_thread: + logger.info(f'RPC server "{self.name}" is shutting down...') + + # kill the server in the thread + if self._server: + self._server.shutdown() + self._server.server_close() + + self.server_thread.join() + + logger.info(f'RPC server "{self.name}" has shutdown.') diff --git a/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/Content/Python/deadline_rpc/base_ue_rpc.py b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/Content/Python/deadline_rpc/base_ue_rpc.py new file mode 100644 index 0000000000..45bf5706e1 --- /dev/null +++ b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/Content/Python/deadline_rpc/base_ue_rpc.py @@ -0,0 +1,241 @@ +# Copyright Epic Games, Inc. All Rights Reserved + +import os +from abc import abstractmethod +import traceback + +from deadline_rpc.client import RPCClient + +import unreal +import __main__ + + +class _RPCContextManager: + """ + Context manager used for automatically marking a task as complete after + the statement is done executing + """ + + def __init__(self, proxy, task_id): + """ + Constructor + """ + # RPC Client proxy + self._proxy = proxy + + # Current task id + self._current_task_id = task_id + + def __enter__(self): + return self._proxy + + def __exit__(self, exc_type, exc_val, exc_tb): + """ + Called when the context manager exits + """ + # Tell the server the task is complete + self._proxy.complete_task(self._current_task_id) + + +class BaseRPC: + """ + Base class for communicating with a Deadline RPC server. 
It is + recommended this class is subclassed for any script that need to + communicate with deadline. The class automatically handles connecting and + marking tasks as complete when some abstract methods are implemented + """ + + def __init__(self, port=None, ignore_rpc=False, verbose=False): + """ + This allows you to get an instance of the class without expecting + an automatic connection to a rpc server. This will allow you to have + a class that can both be executed in a deadline commandline interface or + as a class instance. + :param port: Optional port to connect to + :param ignore_rpc: Flag to short circuit connecting to a rpc server + """ + self._ignore_rpc = ignore_rpc + self._proxy = None + if not self._ignore_rpc: + if not port: + try: + port = os.environ["DEADLINE_RPC_PORT"] + except KeyError: + raise RuntimeError( + "There was no port specified for the rpc server" + ) + + self._port = int(port) + + # Make a connection to the RPC server + self._proxy = self.__establish_connection() + + self.current_task_id = -1 # Setting this to -1 allows us to + # render the first task. 
i.e task 0 + self._get_next_task = True + self._tick_handle = None + + self._verbose_logging = verbose + + # Set up a property to notify the class when a task is complete + self.__create_on_task_complete_global() + self.task_complete = False + self._sent_task_status = False + + # Start getting tasks to process + self._execute() + + @staticmethod + def __create_on_task_complete_global(): + """ + Creates a property in the globals that allows fire and forget tasks + to notify the class when a task is complete and allowing it to get + the next task + :return: + """ + if not hasattr(__main__, "__notify_task_complete__"): + __main__.__notify_task_complete__ = False + + return __main__.__notify_task_complete__ + + def __establish_connection(self): + """ + Makes a connection to the Deadline RPC server + """ + print(f"Connecting to rpc server on port `{self._port}`") + try: + _client = RPCClient(port=int(self._port)) + proxy = _client.proxy + proxy.connect() + except Exception: + raise + else: + if not proxy.is_connected(): + raise RuntimeError( + "A connection could not be made with the server" + ) + print(f"Connection to server established!") + return proxy + + def _wait_for_next_task(self, delta_seconds): + """ + Checks to see if there are any new tasks and executes when there is + :param delta_seconds: + :return: + """ + + # skip if our task is the same as previous + if self.proxy.get_task_id() == self.current_task_id: + if self._verbose_logging: + print("Waiting on next task..") + return + + print("New task received!") + + # Make sure we are explicitly told the task is complete by clearing + # the globals when we get a new task + __main__.__notify_task_complete__ = False + self.task_complete = False + + # Unregister the tick handle and execute the task + unreal.unregister_slate_post_tick_callback(self._tick_handle) + self._tick_handle = None + + # Set the current task and execute + self.current_task_id = self.proxy.get_task_id() + self._get_next_task = False + + 
print(f"Executing task `{self.current_task_id}`") + self.proxy.set_status_message("Executing task command") + + # Execute the next task + # Make sure we fail the job if we encounter any exceptions and + # provide the traceback to the proxy server + try: + self.execute() + except Exception: + trace = traceback.format_exc() + print(trace) + self.proxy.fail_render(trace) + raise + + # Start a non-blocking loop that waits till its notified a task is + # complete + self._tick_handle = unreal.register_slate_post_tick_callback( + self._wait_on_task_complete + ) + + def _wait_on_task_complete(self, delta_seconds): + """ + Waits till a task is mark as completed + :param delta_seconds: + :return: + """ + if self._verbose_logging: + print("Waiting on task to complete..") + if not self._sent_task_status: + self.proxy.set_status_message("Waiting on task completion..") + self._sent_task_status = True + if __main__.__notify_task_complete__ or self.task_complete: + + # Exiting the waiting loop + unreal.unregister_slate_post_tick_callback(self._tick_handle) + self._tick_handle = None + + print("Task marked complete. 
Getting next Task") + self.proxy.set_status_message("Task complete!") + + # Reset the task status notification + self._sent_task_status = False + + # Automatically marks a task complete when the execute function + # exits + with _RPCContextManager(self.proxy, self.current_task_id): + + self._get_next_task = True + + # This will allow us to keep getting tasks till the process is + # closed + self._execute() + + def _execute(self): + """ + Start the execution process + """ + + if self._get_next_task and not self._ignore_rpc: + + # register a callback with the editor that will check and execute + # the task on editor tick + self._tick_handle = unreal.register_slate_post_tick_callback( + self._wait_for_next_task + ) + + @property + def proxy(self): + """ + Returns an instance of the Client proxy + :return: + """ + if not self._proxy: + raise RuntimeError("There is no connected proxy!") + + return self._proxy + + @property + def is_connected(self): + """ + Property that returns if a connection was made with the server + :return: + """ + return self.proxy.is_connected() + + @abstractmethod + def execute(self): + """ + Abstract methods that is executed to perform a task job/command. 
+        This method must be implemented when communicating with a Deadline
+        RPC server
+        :return:
+        """
+        pass
diff --git a/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/Content/Python/deadline_rpc/client.py b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/Content/Python/deadline_rpc/client.py
new file mode 100644
index 0000000000..d0fc7713c3
--- /dev/null
+++ b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/Content/Python/deadline_rpc/client.py
@@ -0,0 +1,103 @@
+import os
+import re
+import logging
+import inspect
+from xmlrpc.client import (
+    ServerProxy,
+    Unmarshaller,
+    Transport,
+    ExpatParser,
+    Fault,
+    ResponseError
+)
+logger = logging.getLogger(__package__)
+
+
+class RPCUnmarshaller(Unmarshaller):
+    def __init__(self, *args, **kwargs):
+        Unmarshaller.__init__(self, *args, **kwargs)
+        # FIX: the named groups had lost their names ("(?P[^:]*)"), which is
+        # invalid regex syntax and raises re.error at import time. The names
+        # "exception" and "exception_message" are required by close(), which
+        # calls match.group('exception') / match.group('exception_message').
+        self.error_pattern = re.compile(r'(?P<exception>[^:]*):(?P<exception_message>.*$)')
+        self.builtin_exceptions = self._get_built_in_exceptions()
+
+    @staticmethod
+    def _get_built_in_exceptions():
+        """
+        Gets a list of the built in exception classes in python.
+
+        :return list[BaseException] A list of the built in exception classes in python:
+        """
+        builtin_exceptions = []
+        for builtin_name, builtin_class in globals().get('__builtins__').items():
+            if inspect.isclass(builtin_class) and issubclass(builtin_class, BaseException):
+                builtin_exceptions.append(builtin_class)
+
+        return builtin_exceptions
+
+    def close(self):
+        """
+        Override so we redefine the unmarshaller.
+
+        :return tuple: A tuple of marshallables.
+ """ + if self._type is None or self._marks: + raise ResponseError() + + if self._type == 'fault': + marshallables = self._stack[0] + match = self.error_pattern.match(marshallables.get('faultString', '')) + if match: + exception_name = match.group('exception').strip("") + exception_message = match.group('exception_message') + + if exception_name: + for exception in self.builtin_exceptions: + if exception.__name__ == exception_name: + raise exception(exception_message) + + # if all else fails just raise the fault + raise Fault(**marshallables) + return tuple(self._stack) + + +class RPCTransport(Transport): + def getparser(self): + """ + Override so we can redefine our transport to use its own custom unmarshaller. + + :return tuple(ExpatParser, RPCUnmarshaller): The parser and unmarshaller instances. + """ + unmarshaller = RPCUnmarshaller() + parser = ExpatParser(unmarshaller) + return parser, unmarshaller + + +class RPCServerProxy(ServerProxy): + def __init__(self, *args, **kwargs): + """ + Override so we can redefine the ServerProxy to use our custom transport. + """ + kwargs['transport'] = RPCTransport() + ServerProxy.__init__(self, *args, **kwargs) + + +class RPCClient: + def __init__(self, port, marshall_exceptions=True): + """ + Initializes the rpc client. + + :param int port: A port number the client should connect to. + :param bool marshall_exceptions: Whether or not the exceptions should be marshalled. 
+ """ + if marshall_exceptions: + proxy_class = RPCServerProxy + else: + proxy_class = ServerProxy + + server_ip = os.environ.get('RPC_SERVER_IP', '127.0.0.1') + + self.proxy = proxy_class( + "http://{server_ip}:{port}".format(server_ip=server_ip, port=port), + allow_none=True, + ) + self.marshall_exceptions = marshall_exceptions + self.port = port diff --git a/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/Content/Python/deadline_rpc/exceptions.py b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/Content/Python/deadline_rpc/exceptions.py new file mode 100644 index 0000000000..b31e4db881 --- /dev/null +++ b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/Content/Python/deadline_rpc/exceptions.py @@ -0,0 +1,78 @@ +class BaseRPCException(Exception): + """ + Raised when a rpc class method is not authored as a static method. + """ + def __init__(self, message=None, line_link=''): + self.message = message + line_link + super().__init__(self.message) + + +class InvalidClassMethod(BaseRPCException): + """ + Raised when a rpc class method is not authored as a static method. + """ + def __init__(self, cls, method, message=None, line_link=''): + self.message = message + + if message is None: + self.message = ( + f'\n {cls.__name__}.{method.__name__} is not a static method. Please decorate with @staticmethod.' + ) + BaseRPCException.__init__(self, self.message, line_link) + + +class InvalidTestCasePort(BaseRPCException): + """ + Raised when a rpc test case class does not have a port defined. + """ + def __init__(self, cls, message=None, line_link=''): + self.message = message + + if message is None: + self.message = f'\n You must set {cls.__name__}.port to a supported RPC port.' 
+ BaseRPCException.__init__(self, self.message, line_link) + + +class InvalidKeyWordParameters(BaseRPCException): + """ + Raised when a rpc function has key word arguments in its parameters. + """ + def __init__(self, function, kwargs, message=None, line_link=''): + self.message = message + + if message is None: + self.message = ( + f'\n Keyword arguments "{kwargs}" were found on "{function.__name__}". The RPC client does not ' + f'support key word arguments . Please change your code to use only arguments.' + ) + BaseRPCException.__init__(self, self.message, line_link) + + +class UnsupportedArgumentType(BaseRPCException): + """ + Raised when a rpc function's argument type is not supported. + """ + def __init__(self, function, arg, supported_types, message=None, line_link=''): + self.message = message + + if message is None: + self.message = ( + f'\n "{function.__name__}" has an argument of type "{arg.__class__.__name__}". The only types that are' + f' supported by the RPC client are {[supported_type.__name__ for supported_type in supported_types]}.' + ) + BaseRPCException.__init__(self, self.message, line_link) + + +class FileNotSavedOnDisk(BaseRPCException): + """ + Raised when a rpc function is called in a context where it is not a saved file on disk. + """ + def __init__(self, function, message=None): + self.message = message + + if message is None: + self.message = ( + f'\n "{function.__name__}" is not being called from a saved file. The RPC client does not ' + f'support code that is not saved. Please save your code to a file on disk and re-run it.' 
+ ) + BaseRPCException.__init__(self, self.message) diff --git a/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/Content/Python/deadline_rpc/factory.py b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/Content/Python/deadline_rpc/factory.py new file mode 100644 index 0000000000..b5714f87c0 --- /dev/null +++ b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/Content/Python/deadline_rpc/factory.py @@ -0,0 +1,249 @@ +import os +import re +import sys +import logging +import types +import inspect +import textwrap +import unittest +from xmlrpc.client import Fault + +from .client import RPCClient +from .validations import ( + validate_key_word_parameters, + validate_class_method, + get_source_file_path, + get_line_link, + validate_arguments, + validate_file_is_saved, +) + +logger = logging.getLogger(__package__) + + +class RPCFactory: + def __init__(self, rpc_client, remap_pairs=None, default_imports=None): + self.rpc_client = rpc_client + self.file_path = None + self.remap_pairs = remap_pairs + self.default_imports = default_imports or [] + + def _get_callstack_references(self, code, function): + """ + Gets all references for the given code. + + :param list[str] code: The code of the callable. + :param callable function: A callable. + :return str: The new code of the callable with all its references added. + """ + import_code = self.default_imports + + client_module = inspect.getmodule(function) + self.file_path = get_source_file_path(function) + + # if a list of remap pairs have been set, the file path will be remapped to the new server location + # Note: The is useful when the server and client are not on the same machine. 
+ server_module_path = self.file_path + for client_path_root, matching_server_path_root in self.remap_pairs or []: + if self.file_path.startswith(client_path_root): + server_module_path = os.path.join( + matching_server_path_root, + self.file_path.replace(client_path_root, '').replace(os.sep, '/').strip('/') + ) + break + + for key in dir(client_module): + for line_number, line in enumerate(code): + if line.startswith('def '): + continue + + if key in re.split('\.|\(| ', line.strip()): + if os.path.basename(self.file_path) == '__init__.py': + base_name = os.path.basename(os.path.dirname(self.file_path)) + else: + base_name = os.path.basename(self.file_path) + + module_name, file_extension = os.path.splitext(base_name) + import_code.append( + f'{module_name} = SourceFileLoader("{module_name}", r"{server_module_path}").load_module()' + ) + import_code.append(f'from {module_name} import {key}') + break + + return textwrap.indent('\n'.join(import_code), ' ' * 4) + + def _get_code(self, function): + """ + Gets the code from a callable. + + :param callable function: A callable. + :return str: The code of the callable. + """ + code = textwrap.dedent(inspect.getsource(function)).split('\n') + code = [line for line in code if not line.startswith('@')] + + # get import code and insert them inside the function + import_code = self._get_callstack_references(code, function) + code.insert(1, import_code) + + # log out the generated code + if os.environ.get('RPC_LOG_CODE'): + for line in code: + logger.debug(line) + + return code + + def _register(self, function): + """ + Registers a given callable with the server. + + :param callable function: A callable. + :return Any: The return value. + """ + code = self._get_code(function) + try: + # if additional paths are explicitly set, then use them. 
This is useful with the client is on another + # machine and the python paths are different + additional_paths = list(filter(None, os.environ.get('RPC_ADDITIONAL_PYTHON_PATHS', '').split(','))) + + if not additional_paths: + # otherwise use the current system path + additional_paths = sys.path + + response = self.rpc_client.proxy.add_new_callable( + function.__name__, '\n'.join(code), + additional_paths + ) + if os.environ.get('RPC_DEBUG'): + logger.debug(response) + + except ConnectionRefusedError: + server_name = os.environ.get(f'RPC_SERVER_{self.rpc_client.port}', self.rpc_client.port) + raise ConnectionRefusedError(f'No connection could be made with "{server_name}"') + + def run_function_remotely(self, function, args): + """ + Handles running the given function on remotely. + + :param callable function: A function reference. + :param tuple(Any) args: The function's arguments. + :return callable: A remote callable. + """ + validate_arguments(function, args) + + # get the remote function instance + self._register(function) + remote_function = getattr(self.rpc_client.proxy, function.__name__) + + current_frame = inspect.currentframe() + outer_frame_info = inspect.getouterframes(current_frame) + # step back 2 frames in the callstack + caller_frame = outer_frame_info[2][0] + # create a trace back that is relevant to the remote code rather than the code transporting it + call_traceback = types.TracebackType(None, caller_frame, caller_frame.f_lasti, caller_frame.f_lineno) + # call the remote function + if not self.rpc_client.marshall_exceptions: + # if exceptions are not marshalled then receive the default Faut + return remote_function(*args) + + # otherwise catch them and add a line link to them + try: + return remote_function(*args) + except Exception as exception: + stack_trace = str(exception) + get_line_link(function) + if isinstance(exception, Fault): + raise Fault(exception.faultCode, exception.faultString) + raise 
exception.__class__(stack_trace).with_traceback(call_traceback) + + +def remote_call(port, default_imports=None, remap_pairs=None): + """ + A decorator that makes this function run remotely. + + :param Enum port: The name of the port application i.e. maya, blender, unreal. + :param list[str] default_imports: A list of import commands that include modules in every call. + :param list(tuple) remap_pairs: A list of tuples with first value being the client file path root and the + second being the matching server path root. This can be useful if the client and server are on two different file + systems and the root of the import paths need to be dynamically replaced. + """ + def decorator(function): + def wrapper(*args, **kwargs): + validate_file_is_saved(function) + validate_key_word_parameters(function, kwargs) + rpc_factory = RPCFactory( + rpc_client=RPCClient(port), + remap_pairs=remap_pairs, + default_imports=default_imports + ) + return rpc_factory.run_function_remotely(function, args) + return wrapper + return decorator + + +def remote_class(decorator): + """ + A decorator that makes this class run remotely. + + :param remote_call decorator: The remote call decorator. + :return: A decorated class. + """ + def decorate(cls): + for attribute, value in cls.__dict__.items(): + validate_class_method(cls, value) + if callable(getattr(cls, attribute)): + setattr(cls, attribute, decorator(getattr(cls, attribute))) + return cls + return decorate + + +class RPCTestCase(unittest.TestCase): + """ + Subclasses unittest.TestCase to implement a RPC compatible TestCase. + """ + port = None + remap_pairs = None + default_imports = None + + @classmethod + def run_remotely(cls, method, args): + """ + Run the given method remotely. + + :param callable method: A method to wrap. 
+ """ + default_imports = cls.__dict__.get('default_imports', None) + port = cls.__dict__.get('port', None) + remap_pairs = cls.__dict__.get('remap_pairs', None) + rpc_factory = RPCFactory( + rpc_client=RPCClient(port), + default_imports=default_imports, + remap_pairs=remap_pairs + ) + return rpc_factory.run_function_remotely(method, args) + + def _callSetUp(self): + """ + Overrides the TestCase._callSetUp method by passing it to be run remotely. + Notice None is passed as an argument instead of self. This is because only static methods + are allowed by the RPCClient. + """ + self.run_remotely(self.setUp, [None]) + + def _callTearDown(self): + """ + Overrides the TestCase._callTearDown method by passing it to be run remotely. + Notice None is passed as an argument instead of self. This is because only static methods + are allowed by the RPCClient. + """ + # notice None is passed as an argument instead of self so self can't be used + self.run_remotely(self.tearDown, [None]) + + def _callTestMethod(self, method): + """ + Overrides the TestCase._callTestMethod method by capturing the test case method that would be run and then + passing it to be run remotely. Notice no arguments are passed. This is because only static methods + are allowed by the RPCClient. + + :param callable method: A method from the test case. 
+ """ + self.run_remotely(method, []) diff --git a/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/Content/Python/deadline_rpc/server.py b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/Content/Python/deadline_rpc/server.py new file mode 100644 index 0000000000..750e8f978b --- /dev/null +++ b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/Content/Python/deadline_rpc/server.py @@ -0,0 +1,29 @@ +import os +import sys +sys.path.append(os.path.dirname(__file__)) + +from base_server import BaseRPCServerManager, BaseRPCServerThread + + +class RPCServerThread(BaseRPCServerThread): + def thread_safe_call(self, callable_instance, *args): + """ + Implementation of a thread safe call in Unreal. + """ + return callable_instance(*args) + + +class RPCServer(BaseRPCServerManager): + def __init__(self, port=None): + """ + Initialize the blender rpc server, with its name and specific port. 
+ """ + super(RPCServer, self).__init__() + self.name = 'RPCServer' + self.port = int(os.environ.get('RPC_PORT', port)) + self.threaded_server_class = RPCServerThread + + +if __name__ == '__main__': + rpc_server = RPCServer() + rpc_server.start(threaded=False) diff --git a/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/Content/Python/deadline_rpc/validations.py b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/Content/Python/deadline_rpc/validations.py new file mode 100644 index 0000000000..e4a9587700 --- /dev/null +++ b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/Content/Python/deadline_rpc/validations.py @@ -0,0 +1,105 @@ +import inspect + +from .exceptions import ( + InvalidClassMethod, + InvalidTestCasePort, + InvalidKeyWordParameters, + UnsupportedArgumentType, + FileNotSavedOnDisk, +) + + +def get_source_file_path(function): + """ + Gets the full path to the source code. + + :param callable function: A callable. + :return str: A file path. + """ + client_module = inspect.getmodule(function) + return client_module.__file__ + + +def get_line_link(function): + """ + Gets the line number of a function. + + :param callable function: A callable. + :return int: The line number + """ + lines, line_number = inspect.getsourcelines(function) + file_path = get_source_file_path(function) + return f' File "{file_path}", line {line_number}' + + +def validate_arguments(function, args): + """ + Validates arguments to ensure they are a supported type. + + :param callable function: A function reference. + :param tuple(Any) args: A list of arguments. 
+ """ + supported_types = [str, int, float, tuple, list, dict, bool] + line_link = get_line_link(function) + for arg in args: + if arg is None: + continue + + if type(arg) not in supported_types: + raise UnsupportedArgumentType(function, arg, supported_types, line_link=line_link) + + +def validate_test_case_class(cls): + """ + This is use to validate a subclass of RPCTestCase. While building your test + suite you can call this method on each class preemptively to validate that it + was defined correctly. + + :param RPCTestCase cls: A class. + :param str file_path: Optionally, a file path to the test case can be passed to give + further context into where the error is occurring. + """ + line_link = get_line_link(cls) + if not cls.__dict__.get('port'): + raise InvalidTestCasePort(cls, line_link=line_link) + + for attribute, method in cls.__dict__.items(): + if callable(method) and not isinstance(method, staticmethod): + if method.__name__.startswith('test'): + raise InvalidClassMethod(cls, method, line_link=line_link) + + +def validate_class_method(cls, method): + """ + Validates a method on a class. + + :param Any cls: A class. + :param callable method: A callable. + """ + if callable(method) and not isinstance(method, staticmethod): + line_link = get_line_link(method) + raise InvalidClassMethod(cls, method, line_link=line_link) + + +def validate_key_word_parameters(function, kwargs): + """ + Validates a method on a class. + + :param callable function: A callable. + :param dict kwargs: A dictionary of key word arguments. + """ + if kwargs: + line_link = get_line_link(function) + raise InvalidKeyWordParameters(function, kwargs, line_link=line_link) + + +def validate_file_is_saved(function): + """ + Validates that the file that the function is from is saved on disk. + + :param callable function: A callable. 
+ """ + try: + inspect.getsourcelines(function) + except OSError: + raise FileNotSavedOnDisk(function) diff --git a/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/Content/Python/deadline_service.py b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/Content/Python/deadline_service.py new file mode 100644 index 0000000000..beee4a70a0 --- /dev/null +++ b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/Content/Python/deadline_service.py @@ -0,0 +1,755 @@ +# Copyright Epic Games, Inc. All Rights Reserved + +""" +Thinkbox Deadline REST API service plugin used to submit and query jobs from a Deadline server +""" + +# Built-in +import json +import logging +import platform + +from getpass import getuser +from threading import Thread, Event + +# Internal +from deadline_job import DeadlineJob +from deadline_http import DeadlineHttp +from deadline_enums import DeadlineJobState, DeadlineJobStatus, HttpRequestType +from deadline_utils import get_editor_deadline_globals +from deadline_command import DeadlineCommand + +# Third-party +import unreal + +logger = logging.getLogger("DeadlineService") +logger.setLevel(logging.INFO) + + +class _Singleton(type): + """ + Singleton metaclass for the Deadline service + """ + # ------------------------------------------------------------------------------------------------------------------ + # Class Variables + + _instances = {} + + # ------------------------------------------------------------------------------------------------------------------ + # Magic Methods + + def __call__(cls, *args, **kwargs): + """ + Determines the initialization behavior of this class + """ + if cls not in cls._instances: + cls._instances[cls] = super(_Singleton, cls).__call__(*args, **kwargs) + + return cls._instances[cls] + + +# TODO: Make this a native Subsystem in the editor +class 
DeadlineService(metaclass=_Singleton): + """ + Singleton class to handle Deadline submissions. + We are using a singleton class as there is no need to have multiple instances of the service. This allows job + queries and submissions to be tracked by a single entity and be a source of truth on the client about all jobs + created in the current session. + """ + + # ------------------------------------------------------------------------------------------------------------------ + # Magic Methods + + def __init__(self, host=None, auto_start_job_updates=False, service_update_interval=1.0): + """ + Deadline service class for submitting jobs to deadline and querying data from deadline + :param str host: Deadline host + :param bool auto_start_job_updates: This flag auto starts processing jobs when the service is initialized + tracked by the service + :param float service_update_interval: Interval(seconds) for job update frequency. Default is 2.0 seconds + """ + + # Track a dictionary of jobs registered with the service. This dictionary contains job object instance ID and a + # reference to the job instance object and deadline job ID. 
+ # i.e {"instance_object_id": {"object": , "job_id": 0001 or None}} + self._current_jobs = {} + self._submitted_jobs = {} # Similar to the current jobs, this tracks all jobs submitted + self._failed_jobs = set() + self._completed_jobs = set() + + # This flag determines if the service should deregister a job when it fails on the server + self.deregister_job_on_failure = True + + # Thread execution variables + self._event_thread = None + self._exit_auto_update = False + self._update_thread_event = Event() + + self._service_update_interval = service_update_interval + + # A timer for executing job update functions on an interval + self._event_timer_manager = self.get_event_manager() + self._event_handler = None + + # Use DeadlineCommand by defaut + self._use_deadline_command = self._get_use_deadline_cmd() # True # TODO: hardcoded for testing, change to project read setting + + # Get/Set service host + self._host = host or self._get_deadline_host() + + # Get deadline https instance + self._http_server = DeadlineHttp(self.host) + + if auto_start_job_updates: + self.start_job_updates() + + # ------------------------------------------------------------------------------------------------------------------ + # Public Properties + + @property + def pools(self): + """ + Returns the current list of pools found on the server + :return: List of pools on the server + """ + return self._get_pools() + + @property + def groups(self): + """ + Returns the current list of groups found on the server + :return: List of groups on the server + """ + return self._get_groups() + + @property + def use_deadline_command(self): + """ + Returns the current value of the use deadline command flag + :return: True if the service uses the deadline command, False otherwise + """ + return self._use_deadline_command + + @use_deadline_command.setter + def use_deadline_command(self, value): + """ + Sets the use deadline command flag + :param value: True if the service uses the deadline command, False 
otherwise + """ + self._use_deadline_command = value + + @property + def host(self): + """ + Returns the server url used by the service + :return: Service url + """ + return self._host + + @host.setter + def host(self, value): + """ + Set the server host on the service + :param value: host value + """ + self._host = value + + # When the host service is updated, get a new connection to that host + self._http_server = DeadlineHttp(self._host) + + @property + def current_jobs(self): + """ + Returns the global current jobs tracked by the service + :return: List of Jobs tracked by the service + """ + return [value["object"] for value in self._current_jobs.values()] + + @property + def failed_jobs(self): + """ + Returns the failed jobs tracked by the service + :return: List of failed Jobs tracked by the service + """ + return self._failed_jobs + + @property + def completed_jobs(self): + """ + Returns the completed jobs tracked by the service + :return: List of completed Jobs tracked by the service + """ + return self._completed_jobs + + # ------------------------------------------------------------------------------------------------------------------ + # Protected Methods + + def _get_pools(self): + """ + This method updates the set of pools tracked by the service + """ + if self._get_use_deadline_cmd(): # if self._use_deadline_command: + return DeadlineCommand().get_pools() + else: + response = self.send_http_request( + HttpRequestType.GET, + "api/pools", + headers={'Content-Type': 'application/json'} + ) + return json.loads(response.decode('utf-8')) + + def _get_groups(self): + """ + This method updates the set of groups tracked by the service + """ + if self._get_use_deadline_cmd(): # if self._use_deadline_command: + return DeadlineCommand().get_groups() + else: + response = self.send_http_request( + HttpRequestType.GET, + "api/groups", + headers={'Content-Type': 'application/json'} + ) + return json.loads(response.decode('utf-8')) + + def _register_job(self, 
job_object, deadline_job_id=None): + """ + This method registers the job object with the service + :param DeadlineJob job_object: Deadline Job object + :param str deadline_job_id: ID of job returned from the server + """ + + # Set the job Id on the job. The service + # should be allowed to set this protected property on the job object as this property should natively + # not be allowed to be set externally + job_object._job_id = deadline_job_id + + job_data = { + str(id(job_object)): + { + "object": job_object, + "job_id": deadline_job_id + } + } + + self._submitted_jobs.update(job_data) + self._current_jobs.update(job_data) + + def _deregister_job(self, job_object): + """ + This method removes the current job object from the tracked jobs + :param DeadlineJob job_object: Deadline job object + """ + + if str(id(job_object)) in self._current_jobs: + self._current_jobs.pop(str(id(job_object)), f"{job_object} could not be found") + + def _update_tracked_job_by_status(self, job_object, job_status, update_job=False): + """ + This method moves the job object from the tracked list based on the current job status + :param DeadlineJob job_object: Deadline job object + :param DeadlineJobStatus job_status: Deadline job status + :param bool update_job: Flag to update the job object's status to the passed in job status + """ + + # Convert the job status into the appropriate enum. This will raise an error if the status enum does not exist. + # If a valid enum is passed into this function, the enum is return + job_status = job_object.get_job_status_enum(job_status) + + # If the job has an unknown status, remove it from the currently tracked jobs by the service. Note we are not + # de-registering failed jobs unless explicitly set, that's because a failed job can be re-queued and + # completed on the next try. + # So we do not want to preemptively remove this job from the tracked jobs by the service. 
+ if job_status is DeadlineJobStatus.UNKNOWN: + self._deregister_job(job_object) + self._failed_jobs.add(job_object) + + elif job_status is DeadlineJobStatus.COMPLETED: + self._deregister_job(job_object) + self._completed_jobs.add(job_object) + + elif job_status is DeadlineJobStatus.FAILED: + if self.deregister_job_on_failure: + self._deregister_job(job_object) + self._failed_jobs.add(job_object) + + if update_job: + job_object.job_status = job_status + + # ------------------------------------------------------------------------------------------------------------------ + # Public Methods + + def send_http_request(self, request_type, api_url, payload=None, fields=None, headers=None, retries=0): + """ + This method is used to upload or receive data from the Deadline server. + :param HttpRequestType request_type: HTTP request verb. i.e GET/POST/PUT/DELETE + :param str api_url: URL relative path queries. Example: /jobs , /pools, /jobs?JobID=0000 + :param payload: Data object to POST/PUT to Deadline server + :param dict fields: Request fields. This is typically used in files and binary uploads + :param dict headers: Header data for request + :param int retries: The number of retries to attempt before failing request. Defaults to 0. 
+ :return: JSON object response from the server + """ + + # Make sure we always have the most up-to-date host + if not self.host or (self.host != self._get_deadline_host()): + self.host = self._get_deadline_host() + + try: + response = self._http_server.send_http_request( + request_type, + api_url, + payload=payload, + fields=fields, + headers=headers, + retries=retries + ) + + except Exception as err: + raise DeadlineServiceError(f"Communication with {self.host} failed with err: \n{err}") + else: + return response + + def submit_job(self, job_object): + """ + This method submits the tracked job to the Deadline server + :param DeadlineJob job_object: Deadline Job object + :returns: Deadline `JobID` if an id was returned from the server + """ + self._validate_job_object(job_object) + + logger.debug(f"Submitting {job_object} to {self.host}..") + + if str(id(job_object)) in self._current_jobs: + logger.warning(f"{job_object} has already been added to the service") + + # Return the job ID of the submitted job + return job_object.job_id + + job_id = None + + job_data = job_object.get_submission_data() + + # Set the job data to return the job ID on submission + job_data.update(IdOnly="true") + + # Update the job data to include the user and machine submitting the job + # Update the username if one is not supplied + if "UserName" not in job_data["JobInfo"]: + + # NOTE: Make sure this matches the expected naming convention by the server else the user will get + # permission errors on job submission + # Todo: Make sure the username convention matches the username on the server + job_data["JobInfo"].update(UserName=getuser()) + + job_data["JobInfo"].update(MachineName=platform.node()) + + self._validate_job_info(job_data["JobInfo"]) + + if self._get_use_deadline_cmd(): # if self._use_deadline_command: + # Submit the job to the Deadline server using the Deadline command + # Todo: Add support for the Deadline command + job_id = DeadlineCommand().submit_job(job_data) + + else: 
+ # Submit the job to the Deadline server using the HTTP API + try: + response = self.send_http_request( + HttpRequestType.POST, + "api/jobs", + payload=json.dumps(job_data).encode('utf-8'), + headers={'Content-Type': 'application/json'} + ) + + except DeadlineServiceError as exp: + logger.error( + f"An error occurred submitting {job_object} to Deadline host `{self.host}`.\n\t{str(exp)}" + ) + self._failed_jobs.add(job_object) + + else: + try: + response = json.loads(response.decode('utf-8')) + + # If an error occurs trying to decode the json data, most likely an error occurred server side thereby + # returning a string instead of the data requested. + # Raise the decoded error + except Exception as err: + raise DeadlineServiceError(f"An error occurred getting the server data:\n\t{response.decode('utf-8')}") + + job_id = response.get('_id', None) + if not job_id: + logger.warning( + f"No JobId was returned from the server for {job_object}. " + f"The service will not be able to get job details for this job!" + ) + else: + # Register the job with the service. + self._register_job(job_object, job_id) + + logger.info(f"Submitted `{job_object.job_name}` to Deadline. JobID: {job_id}") + + return job_id + + def get_job_details(self, job_object): + """ + This method gets the job details for the Deadline job + :param DeadlineJob job_object: Custom Deadline job object + :return: Job details object returned from the server. Usually a Json object + """ + + self._validate_job_object(job_object) + + if str(id(job_object)) not in self._current_jobs: + logger.warning( + f"{job_object} is currently not tracked by the service. The job has either not been submitted, " + f"its already completed or there was a problem with the job!" + ) + elif not job_object.job_id: + logger.error( + f"There is no JobID for {job_object}!" 
+ ) + else: + + try: + job_details = self._http_server.get_job_details(job_object.job_id) + + except (Exception, RuntimeError): + # If an error occurred, most likely the job does not exist on the server anymore. Mark the job as + # unknown + self._update_tracked_job_by_status(job_object, DeadlineJobStatus.UNKNOWN, update_job=True) + else: + # Sometimes Deadline returns a status with a parenthesis after the status indicating the number of tasks + # executing. We only care about the status here so lets split the number of tasks out. + self._update_tracked_job_by_status(job_object, job_details["Job"]["Status"].split()[0]) + return job_details + + def send_job_command(self, job_object, command): + """ + Send a command to the Deadline server for the job + :param DeadlineJob job_object: Deadline job object + :param dict command: Command to send to the Deadline server + :return: Returns the response from the server + """ + self._validate_job_object(job_object) + + if not job_object.job_id: + raise RuntimeError("There is no Deadline job ID to send this command for.") + + try: + response = self._http_server.send_job_command( + job_object.job_id, + command + ) + except Exception as exp: + logger.error( + f"An error occurred getting the command result for {job_object} from Deadline host {self.host}. " + f"\n{exp}" + ) + return "Fail" + else: + if response != "Success": + logger.error(f"An error occurred executing command for {job_object}. \nError: {response}") + return "Fail" + + return response + + def change_job_state(self, job_object, state): + """ + This modifies a submitted job's state on the Deadline server. This can be used in job orchestration. For example + a job can be submitted as suspended/pending and this command can be used to update the state of the job to + active after submission. 
+ :param DeadlineJob job_object: Deadline job object + :param DeadlineJobState state: State to set the job + :return: Submission results + """ + + self._validate_job_object(job_object) + + # Validate jobs state + if not isinstance(state, DeadlineJobState): + raise ValueError(f"`{state}` is not a valid state.") + + return self.send_job_command(job_object, {"Command": state.value}) + + def start_job_updates(self): + """ + This method starts an auto update on jobs in the service. + + The purpose of this system is to allow the service to automatically update the job details from the server. + This allows you to submit a job from your implementation and periodically poll the changes on the job as the + service will continuously update the job details. + Note: This function must explicitly be called or the `auto_start_job_updates` flag must be passed to the service + instance for this functionality to happen. + """ + # Prevent the event from being executed several times in succession + if not self._event_handler: + if not self._event_thread: + + # Create a thread for the job update function. This function takes the current list of jobs + # tracked by the service. The Thread owns an instance of the http connection. This allows the thread + # to have its own pool of http connections separate from the main service. A thread event is passed + # into the thread which allows the process events from the timer to reactivate the function. The + # purpose of this is to prevent unnecessary re-execution while jobs are being processed. + # This also allows the main service to stop function execution within the thread and allow it to cleanly + # exit. + + # HACK: For some odd reason, passing an instance of the service into the thread seems to work as + # opposed to passing in explicit variables. I would prefer explicit variables as the thread does not + # need to have access to the entire service object + + # Threading is used here as the editor runs python on the game thread. 
If a function call is + # executed on an interval (as this part of the service is designed to do), this will halt the editor + # every n interval to process the update event. A separate thread for processing events allows the + # editor to continue functions without interfering with the editor + + # TODO: Figure out a way to have updated variables in the thread vs passing the whole service instance + self._event_thread = Thread( + target=self._update_all_jobs, + args=(self,), + name="deadline_service_auto_update_thread", + daemon=True + ) + + # Start the thread + self._event_thread.start() + + else: + # If the thread is stopped, restart it. + if not self._event_thread.is_alive(): + self._event_thread.start() + + def process_events(): + """ + Function ran by the tick event for monitoring function execution inside of the auto update thread. + """ + # Since the editor ticks at a high rate, this monitors the current state of the function execution in + # the update thread. When a function is done executing, this resets the event on the function. + logger.debug("Processing current jobs.") + if self._update_thread_event.is_set(): + + logger.debug("Job processing complete, restarting..") + # Send an event to tell the thread to start the job processing loop + self._update_thread_event.clear() + + # Attach the thread executions to a timer event + self._event_timer_manager.on_timer_interval_delegate.add_callable(process_events) + + # Start the timer on an interval + self._event_handler = self._event_timer_manager.start_timer(self._service_update_interval) + + # Allow the thread to stop when a python shutdown is detected + unreal.register_python_shutdown_callback(self.stop_job_updates) + + def stop_job_updates(self): + """ + This method stops the auto update thread. This method should be explicitly called to stop the service from + continuously updating the current tracked jobs. 
+ """ + if self._event_handler: + + # Remove the event handle to the tick event + self.stop_function_timer(self._event_timer_manager, self._event_handler) + self._event_handler = None + + if self._event_thread and self._event_thread.is_alive(): + # Force stop the thread + self._exit_auto_update = True + + # immediately stop the thread. Do not wait for jobs to complete. + self._event_thread.join(1.0) + + # Usually if a thread is still alive after a timeout, then something went wrong + if self._event_thread.is_alive(): + logger.error("An error occurred closing the auto update Thread!") + + # Reset the event, thread and tick handler + self._update_thread_event.set() + self._event_thread = None + + def get_job_object_by_job_id(self, job_id): + """ + This method returns the job object tracked by the service based on the deadline job ID + :param job_id: Deadline job ID + :return: DeadlineJob object + :rtype DeadlineJob + """ + + job_object = None + + for job in self._submitted_jobs.values(): + if job_id == job["job_id"]: + job_object = job["object"] + break + + return job_object + + # ------------------------------------------------------------------------------------------------------------------ + # Static Methods + + @staticmethod + def _validate_job_info(job_info): + """ + This method validates the job info dictionary to make sure + the information provided meets a specific standard + :param dict job_info: Deadline job info dictionary + :raises ValueError + """ + + # validate the job info plugin settings + if "Plugin" not in job_info or (not job_info["Plugin"]): + raise ValueError("No plugin was specified in the Job info dictionary") + + @staticmethod + def _get_use_deadline_cmd(): + """ + Returns the deadline command flag settings from the unreal project settings + :return: Deadline command settings unreal project + """ + try: + # This will be set on the deadline editor project settings + deadline_settings = 
unreal.get_default_object(unreal.DeadlineServiceEditorSettings) + + # Catch any other general exceptions + except Exception as exc: + unreal.log( + f"Caught Exception while getting use deadline command flag. Error: {exc}" + ) + + else: + return deadline_settings.deadline_command + + @staticmethod + def _get_deadline_host(): + """ + Returns the host settings from the unreal project settings + :return: Deadline host settings unreal project + """ + try: + # This will be set on the deadline editor project settings + deadline_settings = unreal.get_default_object(unreal.DeadlineServiceEditorSettings) + + # Catch any other general exceptions + except Exception as exc: + unreal.log( + f"Caught Exception while getting deadline host. Error: {exc}" + ) + + else: + return deadline_settings.deadline_host + + @staticmethod + def _validate_job_object(job_object): + """ + This method ensures the object passed in is of type DeadlineJob + :param DeadlineJob job_object: Python object + :raises: RuntimeError if the job object is not of type DeadlineJob + """ + # Using type checking instead of isinstance to prevent cyclical imports + if not isinstance(job_object, DeadlineJob): + raise DeadlineServiceError(f"Job is not of type DeadlineJob. Found {type(job_object)}!") + + @staticmethod + def _update_all_jobs(service): + """ + This method updates current running job properties in a thread. + :param DeadlineService service: Deadline service instance + """ + # Get a Deadline http instance inside for this function. This function is expected to be executed in a thread. + deadline_http = DeadlineHttp(service.host) + + while not service._exit_auto_update: + + while not service._update_thread_event.is_set(): + + # Execute the job update properties on the job object + for job_object in service.current_jobs: + + logger.debug(f"Updating {job_object} job properties") + + # Get the job details for this job and update the job details on the job object. 
The service + # should be allowed to set this protected property on the job object as this property should + # natively not be allowed to be set externally + try: + if job_object.job_id: + job_object.job_details = deadline_http.get_job_details(job_object.job_id) + + # If a job fails to get job details, log it, mark it unknown + except Exception as err: + logger.exception(f"An error occurred getting job details for {job_object}:\n\t{err}") + service._update_tracked_job_by_status( + job_object, + DeadlineJobStatus.UNKNOWN, + update_job=True + ) + + # Iterate over the jobs and update the tracked jobs by the service + for job in service.current_jobs: + service._update_tracked_job_by_status(job, job.job_status) + + service._update_thread_event.set() + + @staticmethod + def get_event_manager(): + """ + Returns an instance of an event timer manager + """ + return unreal.DeadlineServiceTimerManager() + + @staticmethod + def start_function_timer(event_manager, function, interval_in_seconds=2.0): + """ + Start a timer on a function within an interval + :param unreal.DeadlineServiceTimerManager event_manager: Unreal Deadline service timer manager + :param object function: Function to execute + :param float interval_in_seconds: Interval in seconds between function execution. Default is 2.0 seconds + :return: Event timer handle + """ + if not isinstance(event_manager, unreal.DeadlineServiceTimerManager): + raise TypeError( + f"The event manager is not of type `unreal.DeadlineServiceTimerManager`. 
Got {type(event_manager)}" + ) + + event_manager.on_timer_interval_delegate.add_callable(function) + + return event_manager.start_timer(interval_in_seconds) + + @staticmethod + def stop_function_timer(event_manager, time_handle): + """ + Stops the timer event + :param unreal.DeadlineServiceTimerManager event_manager: Service Event manager + :param time_handle: Time handle returned from the event manager + """ + event_manager.stop_timer(time_handle) + + +class DeadlineServiceError(Exception): + """ + General Exception class for the Deadline Service + """ + pass + + +def get_global_deadline_service_instance(): + """ + This method returns an instance of the service from + the interpreter globals. + :return: + """ + # This behavior is a result of unreal classes not able to store python object + # directly on the class due to limitations in the reflection system. + # The expectation is that uclass's that may not be able to store the service + # as a persistent attribute on a class can use the global service instance. + + # BEWARE!!!! 
+ # Due to the nature of the DeadlineService being a singleton, if you get the + # current instance and change the host path for the service, the connection will + # change for every other implementation that uses this service + + deadline_globals = get_editor_deadline_globals() + + if '__deadline_service_instance__' not in deadline_globals: + deadline_globals["__deadline_service_instance__"] = DeadlineService() + + return deadline_globals["__deadline_service_instance__"] diff --git a/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/Content/Python/deadline_utils.py b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/Content/Python/deadline_utils.py new file mode 100644 index 0000000000..1b87e5752e --- /dev/null +++ b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/Content/Python/deadline_utils.py @@ -0,0 +1,220 @@ +# Copyright Epic Games, Inc. All Rights Reserved + +""" +General Deadline utility functions +""" +# Built-in +from copy import deepcopy +import json +import re + +import unreal + + +def format_job_info_json_string(json_string, exclude_aux_files=False): + """ + Deadline Data asset returns a json string, load the string and format the job info in a dictionary + :param str json_string: Json string from deadline preset struct + :param bool exclude_aux_files: Excludes the aux files from the returned job info dictionary if True + :return: job Info dictionary + """ + + if not json_string: + raise RuntimeError(f"Expected json string value but got `{json_string}`") + + job_info = {} + + try: + intermediate_info = json.loads(json_string) + except Exception as err: + raise RuntimeError(f"An error occurred formatting the Job Info string. 
\n\t{err}") + + project_settings = unreal.get_default_object(unreal.DeadlineServiceEditorSettings) + script_category_mappings = project_settings.script_category_mappings + + # The json string keys are camelCased keys which are not the expected input + # types for Deadline. Format the keys to PascalCase. + for key, value in intermediate_info.items(): + + # Remove empty values + if not value: + continue + + # Deadline does not support native boolean so make it a string + if isinstance(value, bool): + value = str(value).lower() + + pascal_case_key = re.sub("(^\S)", lambda string: string.group(1).upper(), key) + + if (pascal_case_key == "AuxFiles") and not exclude_aux_files: + + # The returned json string lists AuxFiles as a list of + # Dictionaries but the expected value is a list of + # strings. reformat this input into the expected value + aux_files = [] + for files in value: + aux_files.append(files["filePath"]) + + job_info[pascal_case_key] = aux_files + + continue + + # Extra option that can be set on the job info are packed inside a + # ExtraJobOptions key in the json string. + # Extract this is and add it as a flat setting in the job info + elif pascal_case_key == "ExtraJobOptions": + job_info.update(value) + + continue + + # Resolve the job script paths to be sent to be sent to the farm. + elif pascal_case_key in ["PreJobScript", "PostJobScript", "PreTaskScript", "PostTaskScript"]: + + # The path mappings in the project settings are a dictionary + # type with the script category as a named path for specifying + # the root directory of a particular script. The User interface + # exposes the category which is what's in the json string. We + # will use this category to look up the actual path mappings in + # the project settings. 
+ script_category = intermediate_info[key]["scriptCategory"] + script_name = intermediate_info[key]["scriptName"] + if script_category and script_name: + job_info[pascal_case_key] = f"{script_category_mappings[script_category]}/{script_name}" + + continue + + # Environment variables for Deadline are numbered key value pairs in + # the form EnvironmentKeyValue#. + # Conform the Env settings to the expected Deadline configuration + elif (pascal_case_key == "EnvironmentKeyValue") and value: + + for index, (env_key, env_value) in enumerate(value.items()): + job_info[f"EnvironmentKeyValue{index}"] = f"{env_key}={env_value}" + + continue + + # ExtraInfoKeyValue for Deadline are numbered key value pairs in the + # form ExtraInfoKeyValue#. + # Conform the setting to the expected Deadline configuration + elif (pascal_case_key == "ExtraInfoKeyValue") and value: + + for index, (env_key, env_value) in enumerate(value.items()): + job_info[f"ExtraInfoKeyValue{index}"] = f"{env_key}={env_value}" + + continue + + else: + # Set the rest of the functions + job_info[pascal_case_key] = value + + # Remove our custom representation of Environment and ExtraInfo Key value + # pairs from the dictionary as the expectation is that these have been + # conformed to deadline's expected key value representation + for key in ["EnvironmentKeyValue", "ExtraInfoKeyValue"]: + job_info.pop(key, None) + + return job_info + + +def format_plugin_info_json_string(json_string): + """ + Deadline Data asset returns a json string, load the string and format the plugin info in a dictionary + :param str json_string: Json string from deadline preset struct + :return: job Info dictionary + """ + + if not json_string: + raise RuntimeError(f"Expected json string value but got `{json_string}`") + + plugin_info = {} + + try: + info = json.loads(json_string) + plugin_info = info["pluginInfo"] + + except Exception as err: + raise RuntimeError(f"An error occurred formatting the Plugin Info string. 
\n\t{err}") + + # The plugin info is listed under the `plugin_info` key. + # The json string keys are camelCased on struct conversion to json. + return plugin_info + + +def get_deadline_info_from_preset(job_preset=None, job_preset_struct=None): + """ + This method returns the job info and plugin info from a deadline preset + :param unreal.DeadlineJobPreset job_preset: Deadline preset asset + :param unreal.DeadlineJobPresetStruct job_preset_struct: The job info and plugin info in the job preset + :return: Returns a tuple with the job info and plugin info dictionary + :rtype: Tuple + """ + + job_info = {} + plugin_info = {} + preset_struct = None + + # TODO: Make sure the preset library is a loaded asset + if job_preset is not None: + preset_struct = job_preset.job_preset_struct + + if job_preset_struct is not None: + preset_struct = job_preset_struct + + if preset_struct: + # Get the Job Info and plugin Info + try: + job_info = dict(unreal.DeadlineServiceEditorHelpers.get_deadline_job_info(preset_struct)) + + plugin_info = dict(unreal.DeadlineServiceEditorHelpers.get_deadline_plugin_info(preset_struct)) + + # Fail the submission if any errors occur + except Exception as err: + unreal.log_error( + f"Error occurred getting deadline job and plugin details. 
\n\tError: {err}" + ) + raise + + return job_info, plugin_info + + +def merge_dictionaries(first_dictionary, second_dictionary): + """ + This method merges two dictionaries and returns a new dictionary as a merger between the two + :param dict first_dictionary: The first dictionary + :param dict second_dictionary: The new dictionary to merge in + :return: A new dictionary based on a merger of the input dictionaries + :rtype: dict + """ + # Make sure we do not overwrite our input dictionary + output_dictionary = deepcopy(first_dictionary) + + for key in second_dictionary: + if isinstance(second_dictionary[key], dict): + if key not in output_dictionary: + output_dictionary[key] = {} + output_dictionary[key] = merge_dictionaries(output_dictionary[key], second_dictionary[key]) + else: + output_dictionary[key] = second_dictionary[key] + + return output_dictionary + + +def get_editor_deadline_globals(): + """ + Get global storage that will persist for the duration of the + current interpreter/process. + + .. tip:: + + Please namespace or otherwise ensure unique naming of any data stored + into this dictionary, as key clashes are not handled/safety checked. + + :return: Global storage + :rtype: dict + """ + import __main__ + try: + return __main__.__editor_deadline_globals__ + except AttributeError: + __main__.__editor_deadline_globals__ = {} + return __main__.__editor_deadline_globals__ diff --git a/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/Content/Python/init_unreal.py b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/Content/Python/init_unreal.py new file mode 100644 index 0000000000..763d6745bc --- /dev/null +++ b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/Content/Python/init_unreal.py @@ -0,0 +1,54 @@ +# Copyright Epic Games, Inc. 
All Rights Reserved + +# Built-in +import sys +from pathlib import Path + +from deadline_utils import get_editor_deadline_globals +from deadline_service import DeadlineService + +# Third-party +import unreal + +plugin_name = "DeadlineService" + + +# Add the actions path to sys path +actions_path = Path(__file__).parent.joinpath("service_actions").as_posix() + +if actions_path not in sys.path: + sys.path.append(actions_path) + +# The asset registry may not be fully loaded by the time this is called, +# warn the user that attempts to look assets up may fail +# unexpectedly. +# Look for a custom commandline start key `-waitonassetregistry`. This key +# is used to trigger a synchronous wait on the asset registry to complete. +# This is useful in commandline states where you explicitly want all assets +# loaded before continuing. +asset_registry = unreal.AssetRegistryHelpers.get_asset_registry() +if asset_registry.is_loading_assets() and ("-waitonassetregistry" in unreal.SystemLibrary.get_command_line().split()): + unreal.log_warning( + f"Asset Registry is still loading. The {plugin_name} plugin will " + f"be loaded after the Asset Registry is complete." + ) + + asset_registry.wait_for_completion() + unreal.log(f"Asset Registry is complete. Loading {plugin_name} plugin.") + +# Create a global instance of the deadline service. This is useful for +# unreal classes that are not able to save the instance as an +# attribute on the class. Because the Deadline Service is a singleton, +# any new instance created from the service module will return the global +# instance +deadline_globals = get_editor_deadline_globals() + +try: + deadline_globals["__deadline_service_instance__"] = DeadlineService() +except Exception as err: + raise RuntimeError(f"An error occurred creating a Deadline service instance. 
\n\tError: {str(err)}") + +from service_actions import submit_job_action + +# Register the menu from the render queue actions +submit_job_action.register_menu_action() diff --git a/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/Content/Python/service_actions/__init__.py b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/Content/Python/service_actions/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/Content/Python/service_actions/submit_job_action.py b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/Content/Python/service_actions/submit_job_action.py new file mode 100644 index 0000000000..37106b2e96 --- /dev/null +++ b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/Content/Python/service_actions/submit_job_action.py @@ -0,0 +1,113 @@ +# Copyright Epic Games, Inc. 
All Rights Reserved + +# Built-in +import argparse +from getpass import getuser + +# Internal +from deadline_service import get_global_deadline_service_instance +from deadline_job import DeadlineJob +from deadline_menus import DeadlineToolBarMenu + +# Third Party +import unreal + +# Editor Utility Widget path +# NOTE: This is very fragile and can break if naming or pathing changes +EDITOR_UTILITY_WIDGET = "/UnrealDeadlineService/Widgets/DeadlineJobSubmitter" + + +def _launch_job_submitter(): + """ + Callback to execute to launch the job submitter + """ + unreal.log("Launching job submitter.") + + submitter_widget = unreal.EditorAssetLibrary.load_asset(EDITOR_UTILITY_WIDGET) + + # Get editor subsystem + subsystem = unreal.get_editor_subsystem(unreal.EditorUtilitySubsystem) + + # Spawn the submitter widget + subsystem.spawn_and_register_tab(submitter_widget) + + +def register_menu_action(): + """ + Creates the toolbar menu + """ + + if not _validate_euw_asset_exists(): + unreal.log_warning( + f"EUW {EDITOR_UTILITY_WIDGET} does not exist in the Asset registry!" 
+ ) + return + + toolbar = DeadlineToolBarMenu() + + toolbar.register_submenu( + "SubmitDeadlineJob", + _launch_job_submitter, + label_name="Submit Deadline Job", + description="Submits a job to Deadline" + ) + + +def _validate_euw_asset_exists(): + """ + Make sure our reference editor utility widget exists in + the asset registry + :returns: Array(AssetData) or None + """ + + asset_registry = unreal.AssetRegistryHelpers.get_asset_registry() + asset_data = asset_registry.get_assets_by_package_name( + EDITOR_UTILITY_WIDGET, + include_only_on_disk_assets=True + ) + + return True if asset_data else False + + +def _execute_submission(args): + """ + Creates and submits a job to Deadline + :param args: Commandline args + """ + + unreal.log("Executing job submission") + + # Create a Deadline job from the selected job preset + deadline_job = DeadlineJob(job_preset=unreal.load_asset(args.job_preset_asset)) + + # If there is no author set, use the current user + if not deadline_job.job_info.get("UserName", None): + deadline_job.job_info = {"UserName": getuser()} + + deadline_service = get_global_deadline_service_instance() + + # Submit the Deadline Job + job_id = deadline_service.submit_job(deadline_job) + + unreal.log(f"Deadline job submitted. 
JobId: {job_id}") + + +if __name__ == "__main__": + unreal.log("Executing job submitter action") + + parser = argparse.ArgumentParser( + description="Submits a job to Deadline", + add_help=False, + ) + + parser.add_argument( + "--job_preset_asset", + type=str, + help="Deadline Job Preset Asset" + ) + + parser.set_defaults(func=_execute_submission) + + # Parse the arguments and execute the function callback + arguments = parser.parse_args() + arguments.func(arguments) diff --git a/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/Content/Widgets/DeadlineJobSubmitter.uasset b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/Content/Widgets/DeadlineJobSubmitter.uasset new file mode 100644 index 0000000000..09f2106639 Binary files /dev/null and b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/Content/Widgets/DeadlineJobSubmitter.uasset differ diff --git a/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/Source/DeadlineService/DeadlineService.Build.cs b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/Source/DeadlineService/DeadlineService.Build.cs new file mode 100644 index 0000000000..bcdf2fd4db --- /dev/null +++ b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/Source/DeadlineService/DeadlineService.Build.cs @@ -0,0 +1,26 @@ +// Copyright Epic Games, Inc. All Rights Reserved. 
+ +using UnrealBuildTool; + +public class DeadlineService : ModuleRules +{ + public DeadlineService(ReadOnlyTargetRules Target) : base(Target) + { + + PrivateDependencyModuleNames.AddRange( + new string[] { + "AssetDefinition", + "Core", + "CoreUObject", + "EditorStyle", + "Engine", + "DeveloperSettings", + "UnrealEd", + "JsonUtilities", + "PropertyEditor", + "SlateCore", + "Slate" + } + ); + } +} diff --git a/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/Source/DeadlineService/Private/DeadlineJobPreset.cpp b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/Source/DeadlineService/Private/DeadlineJobPreset.cpp new file mode 100644 index 0000000000..cb9a1bad5a --- /dev/null +++ b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/Source/DeadlineService/Private/DeadlineJobPreset.cpp @@ -0,0 +1,66 @@ +// Copyright Epic Games, Inc. All Rights Reserved. + +#include "DeadlineJobPreset.h" + +#include "DeadlineServiceEditorSettings.h" + +#include "Widgets/Layout/SBorder.h" +#include "Widgets/SBoxPanel.h" + +#include UE_INLINE_GENERATED_CPP_BY_NAME(DeadlineJobPreset) + +DEFINE_LOG_CATEGORY(LogDeadlineDataAsset); +DEFINE_LOG_CATEGORY(LogDeadlineStruct); + +UDeadlineJobPreset::UDeadlineJobPreset() +{ + SetupPluginInfo(); +} + +/** + * Retrieves the path of the executable file, adding the desired variant to the end. + * DesiredExecutableVariant is defined in DeadlineServiceEditorSettings. + * @return A string representing the path of the executable file. 
+ */ +FString GetExecutablePathWithDesiredVariant() +{ + FString ExecutablePath = FPlatformProcess::ExecutablePath(); + FString ExtensionWithDot = FPaths::GetExtension(ExecutablePath, true); + ExecutablePath.RemoveFromEnd(ExtensionWithDot); + FString DesiredExecutableVariant = GetDefault()->DesiredExecutableVariant; + ExecutablePath.RemoveFromEnd(DesiredExecutableVariant); + + TStringBuilder<1024> StringBuilder; + StringBuilder.Append(ExecutablePath); + StringBuilder.Append(DesiredExecutableVariant); + StringBuilder.Append(ExtensionWithDot); + + return StringBuilder.ToString(); +} + +void UDeadlineJobPreset::SetupPluginInfo() +{ + // Set default values good for most users + if (!JobPresetStruct.PluginInfo.FindKey("Executable")) + { + JobPresetStruct.PluginInfo.Add("Executable", GetExecutablePathWithDesiredVariant()); + } + if (!JobPresetStruct.PluginInfo.FindKey("ProjectFile")) + { + FString ProjectPath = FPaths::GetProjectFilePath(); + + if (FPaths::IsRelative(ProjectPath)) + { + if (const FString FullPath = FPaths::ConvertRelativePathToFull(ProjectPath); FPaths::FileExists(FullPath)) + { + ProjectPath = FullPath; + } + } + + JobPresetStruct.PluginInfo.Add("ProjectFile", ProjectPath); + } + if (!JobPresetStruct.PluginInfo.FindKey("CommandLineArguments")) + { + JobPresetStruct.PluginInfo.Add("CommandLineArguments","-log"); + } +} diff --git a/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/Source/DeadlineService/Private/DeadlineJobPresetFactory.cpp b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/Source/DeadlineService/Private/DeadlineJobPresetFactory.cpp new file mode 100644 index 0000000000..8205595674 --- /dev/null +++ b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/Source/DeadlineService/Private/DeadlineJobPresetFactory.cpp @@ -0,0 +1,30 @@ +// Copyright Epic Games, Inc. All Rights Reserved. 
+ +#include "DeadlineJobPresetFactory.h" + +#include "DeadlineJobPreset.h" + +#include "AssetTypeCategories.h" + +UDeadlineJobPresetFactory::UDeadlineJobPresetFactory() +{ + bCreateNew = true; + bEditAfterNew = false; + bEditorImport = false; + SupportedClass = UDeadlineJobPreset::StaticClass(); +} + + UObject* UDeadlineJobPresetFactory::FactoryCreateNew(UClass* Class, UObject* InParent, FName Name, EObjectFlags Flags, UObject* Context, FFeedbackContext* Warn) +{ + return NewObject(InParent, Class, Name, Flags); +} + +FText UDeadlineJobPresetFactory::GetDisplayName() const +{ + return NSLOCTEXT("AssetTypeActions", "AssetTypeActions_DeadlineJobPreset", "Deadline Job Preset"); +} + +uint32 UDeadlineJobPresetFactory::GetMenuCategories() const +{ + return EAssetTypeCategories::Misc; +} diff --git a/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/Source/DeadlineService/Private/DeadlineServiceEditorSettings.cpp b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/Source/DeadlineService/Private/DeadlineServiceEditorSettings.cpp new file mode 100644 index 0000000000..6aa12a2838 --- /dev/null +++ b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/Source/DeadlineService/Private/DeadlineServiceEditorSettings.cpp @@ -0,0 +1,3 @@ +// Copyright Epic Games, Inc. All Rights Reserved. 
+ +#include "DeadlineServiceEditorSettings.h" diff --git a/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/Source/DeadlineService/Private/DeadlineServiceModule.cpp b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/Source/DeadlineService/Private/DeadlineServiceModule.cpp new file mode 100644 index 0000000000..12fe491657 --- /dev/null +++ b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/Source/DeadlineService/Private/DeadlineServiceModule.cpp @@ -0,0 +1,7 @@ +// Copyright Epic Games, Inc. All Rights Reserved. + +#include "DeadlineServiceModule.h" + +#include "Modules/ModuleManager.h" + +IMPLEMENT_MODULE(FDeadlineServiceModule, DeadlineService); diff --git a/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/Source/DeadlineService/Public/AssetDefinition_DeadlineJobPreset.h b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/Source/DeadlineService/Public/AssetDefinition_DeadlineJobPreset.h new file mode 100644 index 0000000000..3315147e24 --- /dev/null +++ b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/Source/DeadlineService/Public/AssetDefinition_DeadlineJobPreset.h @@ -0,0 +1,26 @@ +// Copyright Epic Games, Inc. All Rights Reserved. 
+ +#pragma once + +#include "DeadlineJobPreset.h" +#include "AssetDefinitionDefault.h" + +#include "AssetDefinition_DeadlineJobPreset.generated.h" + +UCLASS() +class UAssetDefinition_DeadlineJobPreset : public UAssetDefinitionDefault +{ + GENERATED_BODY() + +public: + // UAssetDefinition Begin + virtual FText GetAssetDisplayName() const override { return NSLOCTEXT("AssetTypeActions", "AssetTypeActions_DeadlineJobPreset", "Deadline Job Preset"); } + virtual FLinearColor GetAssetColor() const override { return FLinearColor::Red; } + virtual TSoftClassPtr GetAssetClass() const override { return UDeadlineJobPreset::StaticClass(); } + virtual TConstArrayView GetAssetCategories() const override + { + static const auto Categories = { EAssetCategoryPaths::Misc }; + return Categories; + } + // UAssetDefinition End +}; diff --git a/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/Source/DeadlineService/Public/DeadlineJobPreset.h b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/Source/DeadlineService/Public/DeadlineJobPreset.h new file mode 100644 index 0000000000..e9d77308ba --- /dev/null +++ b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/Source/DeadlineService/Public/DeadlineJobPreset.h @@ -0,0 +1,230 @@ +// Copyright Epic Games, Inc. All Rights Reserved. 
+#pragma once + +#include "Engine/DataAsset.h" +#include "DeadlineJobPreset.generated.h" + +// Forward declarations +class UDeadlineJobPreset; +class UScriptCategories; + +DECLARE_LOG_CATEGORY_EXTERN(LogDeadlineDataAsset, Log, All); +DECLARE_LOG_CATEGORY_EXTERN(LogDeadlineStruct, Log, All); + +/** + * Deadline Job Info Struct + */ +USTRUCT(BlueprintType) +struct DEADLINESERVICE_API FDeadlineJobPresetStruct +{ + /** + * If any of these variable names must change for any reason, be sure to update the string literals in the source as well + * such as in DeadlineJobDataAsset.cpp and MoviePipelineDeadline/DeadlineJobPresetCustomization.cpp, et al. + */ + GENERATED_BODY() + + /** Specifies the name of the job. */ + UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "Job Description") + FString Name = "Untitled"; + + /** Specifies a comment for the job. */ + UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "Job Description", meta = (MultiLine = true)) + FString Comment; + + /** + * Specifies the department that the job belongs to. + * This is simply a way to group jobs together, and does not affect rendering in any way. + */ + UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "Job Description") + FString Department; + + /** Specifies the pool that the job is being submitted to. */ + UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "Job Options") + FString Pool; + + /** + * Specifies the secondary pool that the job can spread to if machines are available. + * If not specified, the job will not use a secondary pool. + */ + UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "Job Options") + FString SecondaryPool; + + /** Specifies the group that the job is being submitted to. */ + UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "Job Options") + FString Group; + + /** Specifies the priority of a job with 0 being the lowest and 100 being the highest unless configured otherwise in Repository Options. 
*/ + UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "Job Options", meta = (ClampMin = 0)) + int32 Priority = 50; + + /** Specifies the time, in seconds, a Worker has to render a task before it times out. */ + UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "Job Options", meta = (ClampMin = 0)) + int32 TaskTimeoutSeconds = 0; + + /** + * If true, a Worker will automatically figure out if it has been rendering too long based on some + * Repository Configuration settings and the render times of previously completed tasks. + */ + UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "Job Options") + bool bEnableAutoTimeout = false; + + /** Deadline Plugin used to execute the current job. */ + UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "Plugin") + FString Plugin = TEXT("UnrealEngine5"); + + /** + * Specifies the maximum number of tasks that a Worker can render at a time. + * This is useful for script plugins that support multithreading. + */ + UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "Job Options", meta = (ClampMin = 1, ClampMax = 16)) + int32 ConcurrentTasks = 1; + + /** If ConcurrentTasks is greater than 1, setting this to true will ensure that a Worker will not dequeue more tasks than it has processors. */ + UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "Job Options") + bool bLimitConcurrentTasksToNumberOfCpus = true; + + /** Specifies the maximum number of machines this job can be rendered on at the same time (0 means unlimited). */ + UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "Job Options", meta = (ClampMin = 0)) + int32 MachineLimit = 0; + + /** If true, the machine names in MachineList will be avoided. todo */ + UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "Job Options", DisplayName = "Machine List Is A Deny List") + bool bMachineListIsADenyList = false; + + /** Job machines to use. 
*/ + UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "Job Options") + FString MachineList; + + /** Specifies the limit groups that this job is a member of. */ + UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "Job Options") + FString LimitGroups; + + /** + * Specifies what jobs must finish before this job will resume (default = blank). + * These dependency jobs must be identified using their unique job ID, + * which is outputted after the job is submitted, and can be found in the Monitor in the “Job ID” column. + */ + UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "Job Options") + FString JobDependencies; + + /** + * Specifies the frame range of the render job. + * See the Frame List Formatting Options in the Job Submission documentation for more information. + */ + UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "Job Options") + FString Frames = TEXT("0"); + + /** Specifies how many frames to render per task. */ + UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "Job Options", meta = (ClampMin = 1)) + int32 ChunkSize = 1; + + /** Specifies what should happen to a job after it completes. */ + UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "Job Options", meta = (GetOptions = "GetOnJobCompleteOptions")) + FString OnJobComplete = "Nothing"; + + /** whether the submitted job should be set to 'suspended' status. */ + UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "Job Options") + bool bSubmitJobAsSuspended = false; + + /** Specifies the job’s user. */ + UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "Advanced Job Options") + FString UserName; + + /** Specifies an optional name to logically group jobs together. */ + UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "Advanced Job Options") + FString BatchName; + + /** + * Specifies a full path to a python script to execute when the job initially starts rendering. + * Note: + * This location is expected to already be path mapped on the farm else it will fail. 
+ */ + UPROPERTY(EditAnywhere, BlueprintReadWrite, AdvancedDisplay, Category = "Advanced Job Options", meta = (FilePathFilter = "Python files (*.py)|*.py")) + FFilePath PreJobScript; + + /** + * Specifies a full path to a python script to execute when the job completes. + * Note: + * This location is expected to already be path mapped on the farm else it will fail. + */ + UPROPERTY(EditAnywhere, BlueprintReadWrite, AdvancedDisplay, Category = "Advanced Job Options", meta = (FilePathFilter = "Python files (*.py)|*.py")) + FFilePath PostJobScript; + + /** + * Specifies a full path to a python script to execute before each task starts rendering. + * Note: + * This location is expected to already be path mapped on the farm else it will fail. + */ + UPROPERTY(EditAnywhere, BlueprintReadWrite, AdvancedDisplay, Category = "Advanced Job Options", meta = (FilePathFilter = "Python files (*.py)|*.py")) + FFilePath PreTaskScript; + + /** + * Specifies a full path to a python script to execute after each task completes. + * Note: + * This location is expected to already be path mapped on the farm else it will fail. + */ + UPROPERTY(EditAnywhere, BlueprintReadWrite, AdvancedDisplay, Category = "Advanced Job Options", meta = (FilePathFilter = "Python files (*.py)|*.py")) + FFilePath PostTaskScript; + + /** Specifies environment variables to set when the job renders. This is only set in the Deadline environment not the Unreal environment. */ + UPROPERTY(EditAnywhere, BlueprintReadWrite, AdvancedDisplay, Category = "Advanced Job Options") + TMap<FString, FString> EnvironmentKeyValue; + + /** Key Value pair environment variables to set when the job renders. This is only set in the Deadline environment not the Unreal environment. */ + UPROPERTY(EditAnywhere, BlueprintReadWrite, AdvancedDisplay, Category = "Advanced Job Options") + TMap<FString, FString> EnvironmentInfo; + + /** Key-Value pair Job Extra Info keys for storing user data on the job.
+ */ + UPROPERTY(EditAnywhere, BlueprintReadWrite, AdvancedDisplay, Category = "Advanced Job Options") + TMap<FString, FString> ExtraInfoKeyValue; + + /** Replace the Task extra info column names with task extra info value. */ + UPROPERTY(EditAnywhere, BlueprintReadWrite, AdvancedDisplay, Category = "Advanced Job Options") + bool bOverrideTaskExtraInfoNames = false; + + /** + * Key Value pair Task Extra Info keys for storing deadline info. This is split up into unique + * settings as there is a limited amount of settings + */ + UPROPERTY(EditAnywhere, BlueprintReadWrite, AdvancedDisplay, Category = "Advanced Job Options") + TMap<FString, FString> TaskExtraInfoNames; + + /** Extra Deadline Job options. Note: Match the naming convention on Deadline's Manual Job Submission website for the options. */ + UPROPERTY(EditAnywhere, BlueprintReadWrite, AdvancedDisplay, Category = "Advanced Job Options") + TMap<FString, FString> ExtraJobOptions; + + /** Deadline Plugin info key value pair. */ + UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "Advanced Job Options") + TMap<FString, FString> PluginInfo; +}; + + +/** + * Deadline Job Preset + */ +UCLASS(BlueprintType, DontCollapseCategories) +class DEADLINESERVICE_API UDeadlineJobPreset : public UObject +{ + GENERATED_BODY() +public: + + UDeadlineJobPreset(); + + /** Job preset struct */ + UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "Job Preset") + FDeadlineJobPresetStruct JobPresetStruct; + + UFUNCTION() + static TArray<FString> GetOnJobCompleteOptions() + { + return {"Nothing","Delete","Archive"}; + } + +protected: + + /** + * Sets up the PluginInfo struct for the FDeadlineJobPresetStruct.
+ */ + void SetupPluginInfo(); + +}; diff --git a/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/Source/DeadlineService/Public/DeadlineJobPresetFactory.h b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/Source/DeadlineService/Public/DeadlineJobPresetFactory.h new file mode 100644 index 0000000000..f08266f975 --- /dev/null +++ b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/Source/DeadlineService/Public/DeadlineJobPresetFactory.h @@ -0,0 +1,23 @@ +// Copyright Epic Games, Inc. All Rights Reserved. +#pragma once + +#include "Factories/Factory.h" + +#include "DeadlineJobPresetFactory.generated.h" + +UCLASS() +class UDeadlineJobPresetFactory : public UFactory +{ + GENERATED_BODY() + +public: + + UDeadlineJobPresetFactory(); + + // Begin UFactory Interface + virtual UObject* FactoryCreateNew(UClass* Class, UObject* InParent, FName Name, EObjectFlags Flags, UObject* Context, FFeedbackContext* Warn) override; + virtual FText GetDisplayName() const override; + virtual uint32 GetMenuCategories() const override; + // End UFactory Interface +}; + diff --git a/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/Source/DeadlineService/Public/DeadlineServiceEditorHelpers.h b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/Source/DeadlineService/Public/DeadlineServiceEditorHelpers.h new file mode 100644 index 0000000000..f54a88fbc0 --- /dev/null +++ b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/Source/DeadlineService/Public/DeadlineServiceEditorHelpers.h @@ -0,0 +1,137 @@ +// Copyright Epic Games, Inc. All Rights Reserved. 
+ +#pragma once + +#include "DeadlineJobPreset.h" +#include "DeadlineServiceEditorSettings.h" +#include "Kismet/BlueprintFunctionLibrary.h" + +#include "DeadlineServiceEditorHelpers.generated.h" + +/** +* Using UCLASS instead of a namespace because we need reflection to call from python +*/ +UCLASS() +class DEADLINESERVICE_API UDeadlineServiceEditorHelpers : public UBlueprintFunctionLibrary +{ + GENERATED_BODY() + + /** + * Returns the given Deadline job info struct a TMap, python compatible + * Excludes "PluginInfo". Use GetPluginInfo to collect this separately. + */ + UFUNCTION(BlueprintCallable, Category = "DeadlineService") + static TMap<FString, FString> GetDeadlineJobInfo(const FDeadlineJobPresetStruct& JobPresetStruct) + { + TMap<FString, FString> ReturnValue = {{"Plugin", "UnrealEngine"}}; + + if (const UDeadlineServiceEditorSettings* Settings = GetDefault<UDeadlineServiceEditorSettings>()) + { + ReturnValue["Plugin"] = Settings->PluginName; + } + + for (TFieldIterator<FProperty> PropIt(FDeadlineJobPresetStruct::StaticStruct()); PropIt; ++PropIt) + { + const FProperty* Property = *PropIt; + if (!Property) + { + continue; + } + + FName PropertyName = Property->GetFName(); + + // Custom Handlers for specific properties prioritizing UX + if (PropertyName.IsEqual("bSubmitJobAsSuspended")) + { + ReturnValue.Add("InitialStatus", JobPresetStruct.bSubmitJobAsSuspended ? "Suspended" : "Active"); + } + else if (PropertyName.IsEqual("bMachineListIsADenyList")) + { + ReturnValue.Add(JobPresetStruct.bMachineListIsADenyList ?
"Denylist" : "Allowlist", JobPresetStruct.MachineList); + } + else if (PropertyName.IsEqual("PreJobScript")) + { + ReturnValue.Add(PropertyName.ToString(), JobPresetStruct.PreJobScript.FilePath); + } + else if (PropertyName.IsEqual("PostJobScript")) + { + ReturnValue.Add(PropertyName.ToString(), JobPresetStruct.PostJobScript.FilePath); + } + else if (PropertyName.IsEqual("PreTaskScript")) + { + ReturnValue.Add(PropertyName.ToString(), JobPresetStruct.PreTaskScript.FilePath); + } + else if (PropertyName.IsEqual("PostTaskScript")) + { + ReturnValue.Add(PropertyName.ToString(), JobPresetStruct.PostTaskScript.FilePath); + } + else if (PropertyName.IsEqual("MachineList") || PropertyName.IsEqual("PluginInfo")) + { + // MachineList is handled above, PluginInfo is handled in a separate function + continue; + } + else if (const FMapProperty* MapProperty = CastField(Property)) + { + // Custom handler for Maps + const void* MapValuePtr = MapProperty->ContainerPtrToValuePtr(&JobPresetStruct); + FScriptMapHelper MapHelper(MapProperty, MapValuePtr); + for (int32 MapSparseIndex = 0; MapSparseIndex < MapHelper.GetMaxIndex(); ++MapSparseIndex) + { + if (MapHelper.IsValidIndex(MapSparseIndex)) + { + const uint8* MapKeyData = MapHelper.GetKeyPtr(MapSparseIndex); + const uint8* MapValueData = MapHelper.GetValuePtr(MapSparseIndex); + + FString KeyDataAsString; + MapHelper.GetKeyProperty()->ExportText_Direct(KeyDataAsString, MapKeyData, MapKeyData, nullptr, PPF_None); + FString ValueDataAsString; + MapHelper.GetValueProperty()->ExportText_Direct(ValueDataAsString, MapValueData, MapValueData, nullptr, PPF_None); + + // Custom support for Extra Job Options. 
These properties are part of the top level Job Info map + if (PropertyName.IsEqual("ExtraJobOptions")) + { + ReturnValue.Add(*KeyDataAsString, *ValueDataAsString); + } + else + { + FString PropertyNameAsString = FString::Printf(TEXT("%s%d"), *PropertyName.ToString(), MapSparseIndex); + FString PropertyValueAsString = FString::Printf(TEXT("%s=%s"), *KeyDataAsString, *ValueDataAsString); + ReturnValue.Add(PropertyNameAsString, PropertyValueAsString); + } + // UE_LOG(LogTemp, Warning, TEXT("%s: %s"), *PropertyNameAsString, *PropertyValueAsString); + } + } + } + else + { + const void* ValuePtr = Property->ContainerPtrToValuePtr<void>(&JobPresetStruct); + FString PropertyNameAsString = PropertyName.ToString(); + FString PropertyValueAsString; + Property->ExportText_Direct(PropertyValueAsString, ValuePtr, ValuePtr, nullptr, PPF_None); + + if (PropertyValueAsString.TrimStartAndEnd().IsEmpty()) + { + continue; + } + + // Sanitize bool + if (Property->IsA(FBoolProperty::StaticClass())) + { + PropertyNameAsString.RemoveFromStart(TEXT("b"), ESearchCase::CaseSensitive); + PropertyValueAsString = PropertyValueAsString.ToLower(); + } + + ReturnValue.Add(PropertyNameAsString, PropertyValueAsString); + // UE_LOG(LogTemp, Warning, TEXT("%s: %s"), *PropertyNameAsString, *PropertyValueAsString); + } + } + + return ReturnValue; + } + + UFUNCTION(BlueprintCallable, Category = "DeadlineService") + static TMap<FString, FString> GetDeadlinePluginInfo(const FDeadlineJobPresetStruct& JobPresetStruct) + { + return JobPresetStruct.PluginInfo; + } +}; diff --git a/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/Source/DeadlineService/Public/DeadlineServiceEditorSettings.h b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/Source/DeadlineService/Public/DeadlineServiceEditorSettings.h new file mode 100644 index 0000000000..5e8a74f052 --- /dev/null +++
b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/Source/DeadlineService/Public/DeadlineServiceEditorSettings.h @@ -0,0 +1,59 @@ +// Copyright Epic Games, Inc. All Rights Reserved. + +#pragma once + +#include "Engine/DeveloperSettings.h" +#include "DeadlineServiceEditorSettings.generated.h" + +/** +* Project-wide settings for the Deadline Service. +*/ +UCLASS(BlueprintType, config = Editor, defaultconfig, meta = (DisplayName = "Deadline Service")) +class DEADLINESERVICE_API UDeadlineServiceEditorSettings : public UDeveloperSettings +{ + GENERATED_BODY() + +public: + + /** Gets the settings container name for the settings, either Project or Editor */ + virtual FName GetContainerName() const override { return FName("Project"); } + /** Gets the category for the settings, some high level grouping like, Editor, Engine, Game...etc. */ + virtual FName GetCategoryName() const override { return FName("Plugins"); } + + /** UObject interface */ + virtual void PostEditChangeProperty(struct FPropertyChangedEvent& PropertyChangedEvent) override + { + Super::PostEditChangeProperty(PropertyChangedEvent); + SaveConfig(); + } + + /** + * Toggle use Deadline command for submission. + * If used Deadline command preempts use of the web service. + */ + UPROPERTY(EditAnywhere, BlueprintReadWrite, config, Category = "Deadline") + bool bDeadlineCommand = true; + + + /** + * What is the host name for the Deadline Server that the REST API is running on? + * Only needs the host name and port (ie: http://localhost:port) + */ + UPROPERTY(EditAnywhere, BlueprintReadWrite, config, Category = "Deadline") + FString DeadlineHost; + + /** + * The name of the plugin to load in Deadline. Usually the default is used. 
+ */ + UPROPERTY(EditAnywhere, BlueprintReadWrite, config, Category = "Deadline") + FString PluginName = "UnrealEngine"; + + /** + * If you'd like the plugin to use a separate executable variant when creating a new DeadlineJobPreset, specify it here. + * For example, to use UnrealEditor-Cmd.exe instead of UnrealEditor.exe, specify "-Cmd". + * Leave blank to use no variant. + */ + UPROPERTY(EditAnywhere, BlueprintReadWrite, config, Category = "Deadline") + FString DesiredExecutableVariant = "-Cmd"; + +}; diff --git a/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/Source/DeadlineService/Public/DeadlineServiceModule.h b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/Source/DeadlineService/Public/DeadlineServiceModule.h new file mode 100644 index 0000000000..c7291b003d --- /dev/null +++ b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/Source/DeadlineService/Public/DeadlineServiceModule.h @@ -0,0 +1,9 @@ +// Copyright Epic Games, Inc. All Rights Reserved. + +#pragma once + +#include "Modules/ModuleInterface.h" + +class FDeadlineServiceModule : public IModuleInterface +{ +}; diff --git a/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/Source/DeadlineService/Public/DeadlineServiceTimerManager.h b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/Source/DeadlineService/Public/DeadlineServiceTimerManager.h new file mode 100644 index 0000000000..e52eea63e0 --- /dev/null +++ b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/Source/DeadlineService/Public/DeadlineServiceTimerManager.h @@ -0,0 +1,74 @@ +// Copyright Epic Games, Inc. 
All Rights Reserved + +#pragma once + +#include "Editor.h" +#include "CoreMinimal.h" +#include "UObject/Object.h" +#include "DeadlineServiceTimerManager.generated.h" + +DECLARE_DYNAMIC_MULTICAST_DELEGATE(FOnTimerInterval); + +/** + * A Deadline Service timer class used for executing function calls on an interval. This class + * can be used by other deadline implementations that use the deadline service to get notifications + * when an update timer is executed by the service. + */ +UCLASS(Blueprintable) +class DEADLINESERVICE_API UDeadlineServiceTimerManager : public UObject +{ + GENERATED_BODY() + +public: + /** Multicast Delegate to bind callable functions */ + UPROPERTY(BlueprintAssignable, Category = "Deadline Service Timer Event") + FOnTimerInterval OnTimerIntervalDelegate; + + /** + * Set a timer to execute a delegate. This timer is also used by the deadline service to periodically get updates + * on submitted jobs. This method returns a time handle reference for this function. This handle can be used at a + * later time to stop the timer. + * + * @param TimerInterval Float timer intervals in seconds. Default is 1.0 seconds. + * @param bLoopTimer Determine whether to loop the timer. By default this is true + */ + UFUNCTION(BlueprintCallable, Category = "Deadline Service Timer") + FTimerHandle StartTimer(float TimerInterval=1.0, bool bLoopTimer=true ) + { + + GEditor->GetTimerManager()->SetTimer( + DeadlineServiceTimerHandle, + FTimerDelegate::CreateUObject(this, &UDeadlineServiceTimerManager::OnTimerEvent), + TimerInterval, + bLoopTimer + ); + + return DeadlineServiceTimerHandle; + + } + + /** + * Function to stop the service timer. 
+ * + * @param TimerHandle Timer handle to stop + */ + UFUNCTION(BlueprintCallable, Category = "Deadline Service Timer") + void StopTimer(FTimerHandle TimerHandle) + { + // Stop the timer + GEditor->GetTimerManager()->ClearTimer(TimerHandle); + } + +private: + /** Internal Timer handle */ + FTimerHandle DeadlineServiceTimerHandle; + +protected: + + /**Internal function to broadcast timer delegate on the editor timer interval. */ + UFUNCTION() + void OnTimerEvent() const + { + OnTimerIntervalDelegate.Broadcast(); + } +}; diff --git a/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/UnrealDeadlineService.uplugin b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/UnrealDeadlineService.uplugin new file mode 100644 index 0000000000..028fad1f07 --- /dev/null +++ b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealEnginePlugins/UnrealDeadlineService/UnrealDeadlineService.uplugin @@ -0,0 +1,35 @@ +{ + "FileVersion": 1, + "Version": 1, + "VersionName": "1.0", + "FriendlyName": "Unreal Deadline Service", + "Description": "Plugin to interact with Thinkbox Deadline renderfarm. 
Allows job submissions, queries, and job state updates.", + "Category": "AWS Thinkbox Deadline", + "CreatedBy": "Epic Games, Inc.", + "CreatedByURL" : "http://www.epicgames.com", + "DocsURL": "", + "MarketplaceURL": "", + "SupportURL": "", + "CanContainContent": true, + "IsBetaVersion": true, + "IsExperimentalVersion": false, + "Installed": false, + "Plugins": + [ + { + "Name": "PythonScriptPlugin", + "Enabled": true + }, + { + "Name": "EditorScriptingUtilities", + "Enabled": true + } + ], + "Modules": + [ + { + "Name": "DeadlineService", + "Type" : "UncookedOnly" + } + ] +} diff --git a/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealSyncUtil.py b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealSyncUtil.py new file mode 100644 index 0000000000..af0cf796af --- /dev/null +++ b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/UnrealSyncUtil.py @@ -0,0 +1,655 @@ +import subprocess +import re +import socket +import os +from xml.sax.handler import property_declaration_handler +import zipfile +import time +import stat +import threading +import platform + +try: + import queue +except ImportError: + import Queue as queue + +""" +Utility tools to sync and build projects in remote machines. +Currently it supports Perforce only, but user can implement other source control system (i.e. git) +""" + + +class UnrealToolError(Exception): + pass + + +class PerforceError(UnrealToolError): + def __init__(self, message): + self.message = message + + def __str__(self): + return repr(self.message) + + +class PerforceArgumentError(PerforceError): + """An exception that is raised when a perforce command is executed but is missing required arguments. 
+ + Attributes: + message -- programmer defined message + """ + + pass + + +class PerforceMissingWorkspaceError(PerforceError): + def __init__(self, hostName, streamName): + self.message = 'Could not find a workspace for stream: "%s" on host: "%s"' % ( + streamName, + hostName, + ) + + +class PerforceMultipleWorkspaceError(PerforceError): + def __init__(self, hostName, streamName, count): + self.message = ( + 'Found multiple(%d) workspaces for stream: "%s" on host: "%s"' + % (count, streamName, hostName) + ) + + +class PerforceResponseError(PerforceError): + def __init__(self, message, command, response): + self.message = '%s. Executed Command: "%s". Got Response: "%s"' % ( + message, + " ".join(command), + response, + ) + + +class PerforceMultipleProjectError(PerforceError): + def __init__(self, path, count): + self.message = 'Found multiple(%d) uproject files with this path: "%s"' % ( + count, + path, + ) + + +class PerforceProjectNotFoundError(PerforceError): + def __init__(self, path): + self.message = 'Could not find a uproject file with this path: "%s"' % (path) + + +class StoppableThread(threading.Thread): + def __init__(self, process, _queue, *args, **kwargs): + super(StoppableThread, self).__init__(*args, **kwargs) + self.stopEvent = threading.Event() + self.process = process + self.queue = _queue + + def stop(self): + self.stopEvent.set() + + def run(self): + while True: + if self.stopEvent.isSet(): + return + try: + for line in iter(self.process.stdout.readline, b""): + self.queue.put(line) + self.process.stdout.close() + except ValueError: + # File most likely closed so stop trying to queue output. + return + + +class PerforceUtils(object): + def __init__(self, stream, gamePath, env): + # The hostname of the perforce server. Defaults to the "P4PORT" Environment Var. 
+ self._serverName = self._FindServerHostName(env) + if not self._serverName: + raise PerforceError('"P4PORT" has not been set in the Slave environment!') + + # The hostname of the local computer. Defaults to the local hostname. + self._localHost = socket.gethostname() + + # Which stream should the perforce commands be executed for. + # Assumes a workspace exists on this machine for that stream. + # (Removing '/' in the end) + self._stream = re.sub("/$", "", stream) # str + + # Store game name so that we can sync project only (not entire stream) + self._gamePath = gamePath + + # The change list that the sync operations should sync to. + self._changelist = 0 # int + + # The workspace the perforce commands should be executed for. + # Can be automatically determined with DetermineClientWorkspace() + self._clientWorkspace = None # str + + # The root on the local machine that the workspace is based out of. + # Can be automatically determined with DetermineClientWorkspace() + self._workspaceRoot = None # str + + # Sync Estimates calculated by DetermineSyncWorkEstimate + self._syncEstimates = [0, 0, 0] # [int,int,int] + self._syncResults = [0, 0, 0] # [int,int, int] + + # Sync entire stream or just game path + self._bSyncAll = False + + # Name of the uproject file + self._uprojectFile = None + + self._env = env + + @property + def workspaceRoot(self): + return self._workspaceRoot + + @property + def changelist(self): + return self._changelist + + @property + def syncEstimates(self): + return tuple(self._syncEstimates) + + @property + def localHost(self): + self._localHost + + @property + def serverName(self): + self._serverName + + @property + def projectRoot(self): + return "%s/%s" % (self._workspaceRoot, self._gamePath) + + @property + def uprojectPath(self): + return "%s/%s" % (self.projectRoot, self._uprojectFile) + + def setChangelist(self, value): + self._changelist = value + + def _FindServerHostName(self, env): + # The hostname of the perforce server. 
Defaults to the "P4PORT" Environment Var. + # If it's not set, try to find it from 'p4 set' command + if env: + name = env.get("P4PORT") + else: + name = os.getenv("P4PORT") + if name: + return name + output = subprocess.check_output(["p4", "set"]) + for line in output.splitlines(): + m = re.search("(?<=P4PORT=)(.*:\d+)", line) + if m: + return m.group() + + def SetSyncEntireStream(self, bSyncAll): + self._bSyncAll = bSyncAll + + # + # Automatically determine the client workspace by iterating through + # available workspaces for the local host machine + # + # Raises PerforceMultipleWrokspaceError when multiple workspaces are found for this host/stream. + # (i.e. a render host is also artist workstation where one workspace for artist and another for render job) + # This code should be modified to handle the case (i.e. determine by workspace name) + # + def DetermineClientWorkspace(self): + if not self._stream: + raise PerforceArgumentError("stream must be set to retrieve workspaces") + if not self._localHost: + raise PerforceArgumentError( + "localHostName must be set to retrieve workspaces" + ) + + cmd = [ + "p4", + "-ztag", + "-F", + '"%client%,%Root%,%Host%"', + "workspaces", + "-S", + self._stream, + ] + + result = subprocess.check_output(cmd, env=self._env) + print(">>>>result {}".format(result)) + result = result.splitlines() + local_workspaces = [] + + for line in result: + line = str(line).strip() + match = re.search('"(.*),(.*),(.*)"', line) + if match: + workspace, root, host = match.groups() + if host.lower() == self._localHost.lower(): + local_workspaces.append((workspace, root)) + + if not local_workspaces: + raise PerforceMissingWorkspaceError(self._localHost, self._stream) + elif len(local_workspaces) > 1: + raise PerforceMultipleWorkspaceError( + self._localHost, self._stream, len(local_workspaces) + ) + + workspace, root = local_workspaces[0] + print( + "Successfully found perforce workspace: %s on this host: %s" + % (workspace, self._localHost) + ) 
+ self._clientWorkspace = workspace + self._workspaceRoot = root + + def DetermineProjectRoot(self, uprojectFile): + # Find project file from workspaceRoot. If gamePath contains '...', it should try to search the path recursively + # 2023-04-06 18:31:56: 0: PYTHON: {'self': , 'uprojectFile': u'DLFarmTests.uproject', 'cmd': ['p4', '-p', '10.10.10.162:1666', '-c', 'DLFarmTests_bepic-devtop01', 'files', u'//dl-farm-test/mainline///DLFarmTests.uproject'], 'result': [], 'search_path': u'//dl-farm-test/mainline///DLFarmTests.uproject'} + + if not self._gamePath: + search_path = self._stream + "/" + uprojectFile + else: + search_path = self._stream + "/" + self._gamePath + "/" + uprojectFile + cmd = self.GetP4CommandPrefix() + ["files", search_path] + result = subprocess.check_output(cmd, env=self._env) + result = result.splitlines() + + if len(result) == 0: + raise PerforceProjectNotFoundError(search_path) + elif len(result) > 1: + raise PerforceMultipleProjectError(search_path, len(result)) + result = result[0] + # m = re.search("%s/(.*)/%s" % (self._stream, uprojectFile), str(result)) + m = re.search("%s/.*/?%s#.*" % (self._stream, uprojectFile), str(result)) + if not m: + raise PerforceError("Unable to parse project path: %s" % str(result)) + + # self._gamePath = m.group(1) + self._uprojectFile = uprojectFile + print("ProjectRoot: %s" % self.projectRoot) + + def DetermineLatestChangelist(self): + + sync_path = self._stream + if not self._bSyncAll: + sync_path = self._stream + + # Default to no cl so that if one of the below checks fails it still gets latest. 
+ self._changelist = 0 + latest_cl_command = self.GetP4CommandPrefix() + [ + "changes", + "-m1", + sync_path + "/...", + ] + print("Determining latest CL using: " + " ".join(latest_cl_command)) + + info = subprocess.STARTUPINFO() + info.dwFlags |= subprocess.STARTF_USESHOWWINDOW + + # proc = subprocess.Popen(latest_cl_command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, startupinfo=info) + # result = proc.stdout.readline().strip() + + result = subprocess.check_output(latest_cl_command, startupinfo=info, + env=self._env) + + print("Result: {}".format(result)) + if not result.startswith("Change "): + raise PerforceResponseError( + "Failed to get latest changelist for stream", latest_cl_command, result + ) + + clTest = re.search("(?<=Change )(\S*)(?= )", result) + if clTest is None: + raise PerforceResponseError( + "Failed to parse response for latest changelist", + latest_cl_command, + result, + ) + + self._changelist = int(clTest.group()) + print("Changelist set: %d" % self._changelist) + + def DetermineSyncWorkEstimate(self, bForceSync=False): + # Get an estimate on how much syncing there is to do. + sync_estimate_command = self._BuildSyncCommand( + bForceSync=bForceSync, bDryRun=True + ) + info = subprocess.STARTUPINFO() + info.dwFlags |= subprocess.STARTF_USESHOWWINDOW + result = subprocess.check_output(sync_estimate_command, + startupinfo=info, + env=self._env) + + print(f"Sync Estimate Result: {result}") + + estimate_success = False + lines = result.splitlines() + for line in lines: + # Should return in the form "Server network estimates: files added/updated/deleted=x/y/z, bytes..." 
+ estimateResult = re.search("(?<=deleted=)(\S*)(?=,)", str(line)) + if estimateResult: + estimate_success = True + estimates = list(map(int, estimateResult.group().split("/"))) + self._syncEstimates[0] += estimates[0] # added + self._syncEstimates[1] += estimates[1] # updated + self._syncEstimates[2] += estimates[2] # deleted + + if not estimate_success: + self._syncEstimates = [ + -1, + -1, + -1, + ] # Progress will be wrong but no need to crash over it. Don't use 0 here because 0 is a valid estimate (already sync'd) + raise PerforceResponseError( + "Failed to get estimated work for sync operation.", + sync_estimate_command, + result, + ) + + def CleanWorkspace(self): + sync_path = self._stream + if not self._bSyncAll: + sync_path = self._stream + "/" + self._gamePath + + clean_command = self.GetP4CommandPrefix() + [ + "clean", + "-e", + "-a", + "-d", + "-m", + sync_path + "/...", + ] # "]#, "-m"] + print("Cleaning using: " + " ".join(clean_command)) + + result = "" + try: + result = subprocess.check_output(clean_command, + env=self._env) + except subprocess.CalledProcessError as e: + print("Clean: %s" % str(e)) + + print("Clean Result: " + result) + + # Build a perforce sync command based on the options + def _BuildSyncCommand(self, bForceSync=False, bDryRun=False): + + sync_files = [] + if self._bSyncAll: + sync_files.append("") + else: + sync_files.append("%s/..." 
% (self._stream)) + + if self._changelist > 0: + for i in range(len(sync_files)): + sync_files[i] += "@%d" % self._changelist + + sync_command = self.GetP4CommandPrefix() + ["sync"] + if bDryRun: + sync_command.append("-N") + else: + sync_command.append("--parallel=threads=8") + + if bForceSync: + sync_command.append("-f") + + sync_command.extend(sync_files) + + return sync_command + + def Sync(self, progressCallback=None, bForceSync=False): + syncCommand = self._BuildSyncCommand(bForceSync=bForceSync, bDryRun=False) + + print("Sync Command: " + " ".join(syncCommand)) + + self._syncResults = [0, 0, 0] + + process = subprocess.Popen( + syncCommand, stdout=subprocess.PIPE, stderr=subprocess.PIPE, + env=self._env + ) + + stdoutQueue = queue.Queue() + # stdoutThread = threading.Thread(target=queueStdout, args=(process, stdoutQueue)) + stdoutThread = StoppableThread(process, stdoutQueue) + stdoutThread.daemon = True + stdoutThread.start() + + while process.poll() is None: + while not stdoutQueue.empty(): + stdOutLine = stdoutQueue.get_nowait() + print(stdOutLine) + stdOutLine = str(stdOutLine) + + if ( + "The system cannot find the file specified." in stdOutLine + or "There is not enough space on the disk." in stdOutLine + ): + raise IOError( + 'Suspected Out of Disk Error while syncing: "%s"' % stdOutLine + ) + + # Look for either "deleted", "updated", or "added" and add to our results array. + if "added" in stdOutLine: + self._syncResults[0] += 1 + if "updated" in stdOutLine: + self._syncResults[1] += 1 + if "refreshing" in stdOutLine: + self._syncResults[ + 1 + ] += ( + 1 # This is a guess that refreshing in a full sync is the same. + ) + if "deleted" in stdOutLine: + self._syncResults[2] += 1 + + if progressCallback is not None: + progressCallback(self) + + print("process.poll returned a code, sync finished. Calling Stop") + stdoutThread.stop() + print("called stop. 
calling join.") + stdoutThread.join() + print("called join.") + + # One more progress callback to ensure we're at 1.0 + if progressCallback is not None: + progressCallback(1) + + # Generate the prefix for perforce commands that need user/workspace for scope. + def GetP4CommandPrefix(self): # -> str[] + return ["p4", "-p", self._serverName, "-c", self._clientWorkspace] + + # Get the sync progress for the current or last sync (Range: 0-1) + def GetSyncProgress(self): # -> float + # Totals + total_operations_est = float( + self._syncEstimates[0] + self._syncEstimates[1] + self._syncEstimates[2] + ) + total_operations = float( + self._syncResults[0] + self._syncResults[1] + self._syncResults[2] + ) + + if total_operations > 0: + return total_operations / total_operations_est + + return 0 + + +class BuildUtils(object): + def __init__(self, engineRoot, uprojectPath, editorName): + + self.engineRoot = engineRoot.replace("\\", "/") + self.uprojectPath = uprojectPath.replace("\\", "/") + self.editorName = editorName + print("engine_root: %s" % self.engineRoot) + print("uproject_path: %s" % self.uprojectPath) + print("editor_name: %s" % self.editorName) + + def IsSourceBuildEngine(self): + items = os.listdir(self.engineRoot) + items = [ + item for item in items if re.search("GenerateProjectFiles", item, re.I) + ] + return len(items) > 0 + + def IsCppProject(self): + project_root = os.path.dirname(self.uprojectPath) + items = os.listdir(project_root) + items = [item for item in items if re.search("Source", item, re.I)] + return len(items) > 0 + + def GetGenerateProjectFileProgram(self): + system = platform.system() + + if system == "Windows": + paths = [ + os.path.join(self.engineRoot, "GenerateProjectFiles.bat"), + os.path.join( + self.engineRoot, + "Engine", + "Build", + "BatchFiles", + "GenerateProjectFiles.bat", + ), + os.path.join( + self.engineRoot, + "Engine", + "Binaries", + "DotNET", + "UnrealBuildTool.exe", + ), + os.path.join( + self.engineRoot, + "Engine", + 
"Binaries", + "DotNET", + "UnrealBuildTool", + "UnrealBuildTool.exe", + ), + ] + + elif system == "Linux": + paths = [ + os.path.join(self.engineRoot, "GenerateProjectFiles.sh"), + os.path.join( + self.engineRoot, + "Engine", + "Build", + "BatchFiles", + "Linux", + "GenerateProjectFiles.sh", + ), + ] + + elif system == "Darwin": + paths = [ + os.path.join(self.engineRoot, "GenerateProjectFiles.sh"), + os.path.join( + self.engineRoot, + "Engine", + "Build", + "BatchFiles", + "Mac", + "GenerateProjectFiles.sh", + ), + ] + else: + raise RuntimeError("Platform not supported: %s" % system) + + for path in paths: + if os.path.exists(path): + return path + raise RuntimeError("Failed to find program to generate project files") + + def GetBuildProgram(self): + system = platform.system() + if system == "Windows": + return os.path.join( + self.engineRoot, "Engine", "Build", "BatchFiles", "Build.bat" + ) + elif system == "Linux": + return os.path.join( + self.engineRoot, "Engine", "Build", "BatchFiles", "Linux", "Build.sh" + ) + elif system == "Darwin": + return os.path.join( + self.engineRoot, "Engine", "Build", "BatchFiles", "Mac", "Build.sh" + ) + else: + raise RuntimeError("Platform not supported: %s" % system) + + def GetBuildArgs(self): + system = platform.system() + if system == "Windows": + system = "Win64" + elif system == "Darwin": + system = "Mac" + + args = [system, "Development", "-NoHotReloadFromIDE", "-progress"] + return args + + def GetEditorBuildArgs(self): + system = platform.system() + if system == "Windows": + system = "Win64" + elif system == "Darwin": + system = "Mac" + + args = [ + system, + "Development", + self.uprojectPath.encode("utf-8"), + "-NoHotReloadFromIDE", + "-progress", + ] + return args + + def GenerateProjectFiles(self): + program = self.GetGenerateProjectFileProgram().replace("\\", "/") + args = [program] + if re.search("UnrealBuildTool", program.split("/")[-1]): + args.append("-ProjectFiles") + + args.append(self.uprojectPath) + 
args.append("-progress") + + print("Generating Project Files with: %s" % " ".join(args)) + try: + process = subprocess.check_output(args, env=self._env) + except subprocess.CalledProcessError as e: + print( + "Exception while generating project files: %s (%s)" % (str(e), e.output) + ) + raise + print("Generated Project Files.") + + def BuildBuildTargets(self): + print("Starting to build targets...") + + build_targets = [] + if self.IsSourceBuildEngine(): + build_targets.append(("UnrealHeaderTool", self.GetBuildArgs())) + build_targets.append(("ShaderCompileWorker", self.GetBuildArgs())) + build_targets.append(("CrashReportClient", self.GetBuildArgs())) + build_targets.append(("UnrealLightmass", self.GetBuildArgs())) + + build_targets.append( + (self.editorName.encode("utf-8"), self.GetEditorBuildArgs()) + ) + program = self.GetBuildProgram().replace("\\", "/") + for target, buildArgs in build_targets: + args = [program, target] + buildArgs + print("Compiling {}...".format(target)) + print("Command Line: %s" % str(args)) + try: + process = subprocess.check_output(args, env=self._env) + except subprocess.CalledProcessError as e: + print("Exception while building target: %s (%s)" % (str(e), e.output)) + raise + + print("Finished building targets.") diff --git a/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/ue_utils/__init__.py b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/ue_utils/__init__.py new file mode 100644 index 0000000000..4857450af2 --- /dev/null +++ b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/ue_utils/__init__.py @@ -0,0 +1 @@ +# Copyright Epic Games, Inc. 
All Rights Reserved diff --git a/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/ue_utils/rpc/__init__.py b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/ue_utils/rpc/__init__.py new file mode 100644 index 0000000000..ce55d23926 --- /dev/null +++ b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/ue_utils/rpc/__init__.py @@ -0,0 +1,9 @@ +# Copyright Epic Games, Inc. All Rights Reserved +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. + +from . import client, factory + +__all__ = [ + "client", + "factory" +] diff --git a/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/ue_utils/rpc/base_server.py b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/ue_utils/rpc/base_server.py new file mode 100644 index 0000000000..3e0f87babb --- /dev/null +++ b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/ue_utils/rpc/base_server.py @@ -0,0 +1,275 @@ +# Copyright Epic Games, Inc. All Rights Reserved +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. + +import os +import sys +import abc +import queue +import time +import logging +import threading +from xmlrpc.server import SimpleXMLRPCServer + +# importlib machinery needs to be available for importing client modules +from importlib.machinery import SourceFileLoader + +logger = logging.getLogger(__name__) + +EXECUTION_QUEUE = queue.Queue() +RETURN_VALUE_NAME = 'RPC_SERVER_RETURN_VALUE' +ERROR_VALUE_NAME = 'RPC_SERVER_ERROR_VALUE' + + +def run_in_main_thread(callable_instance, *args): + """ + Runs the provided callable instance in the main thread by added it to a que + that is processed by a recurring event in an integration like a timer. + + :param call callable_instance: A callable. + :return: The return value of any call from the client. 
+ """ + timeout = int(os.environ.get('RPC_TIME_OUT', 20)) + + globals().pop(RETURN_VALUE_NAME, None) + globals().pop(ERROR_VALUE_NAME, None) + EXECUTION_QUEUE.put((callable_instance, args)) + + for attempt in range(timeout * 10): + if RETURN_VALUE_NAME in globals(): + return globals().get(RETURN_VALUE_NAME) + elif ERROR_VALUE_NAME in globals(): + raise globals()[ERROR_VALUE_NAME] + else: + time.sleep(0.1) + + if RETURN_VALUE_NAME not in globals(): + raise TimeoutError( + f'The call "{callable_instance.__name__}" timed out because it hit the timeout limit' + f' of {timeout} seconds.' + ) + + +def execute_queued_calls(*extra_args): + """ + Runs calls in the execution que till they are gone. Designed to be passed to a + recurring event in an integration like a timer. + """ + while not EXECUTION_QUEUE.empty(): + if RETURN_VALUE_NAME not in globals(): + callable_instance, args = EXECUTION_QUEUE.get() + try: + globals()[RETURN_VALUE_NAME] = callable_instance(*args) + except Exception as error: + # store the error in the globals and re-raise it + globals()[ERROR_VALUE_NAME] = error + raise error + + +class BaseServer(SimpleXMLRPCServer): + def serve_until_killed(self): + """ + Serves till killed by the client. + """ + self.quit = False + while not self.quit: + self.handle_request() + + +class BaseRPCServer: + def __init__(self, name, port, is_thread=False): + """ + Initialize the base server. + + :param str name: The name of the server. + :param int port: The number of the server port. + :param bool is_thread: Whether or not the server is encapsulated in a thread. 
+ """ + self.server = BaseServer( + (os.environ.get('RPC_HOST', '127.0.0.1'), port), + logRequests=False, + allow_none=True + ) + self.is_thread = is_thread + self.server.register_function(self.add_new_callable) + self.server.register_function(self.kill) + self.server.register_function(self.is_running) + self.server.register_function(self.set_env) + self.server.register_introspection_functions() + self.server.register_multicall_functions() + logger.info(f'Started RPC server "{name}".') + + @staticmethod + def is_running(): + """ + Responds if the server is running. + """ + return True + + @staticmethod + def set_env(name, value): + """ + Sets an environment variable in the server's python environment. + + :param str name: The name of the variable. + :param str value: The value. + """ + os.environ[name] = str(value) + + def kill(self): + """ + Kill the running server from the client. Only if running in blocking mode. + """ + self.server.quit = True + return True + + def add_new_callable(self, callable_name, code, client_system_path, remap_pairs=None): + """ + Adds a new callable defined in the client to the server. + + :param str callable_name: The name of the function that will added to the server. + :param str code: The code of the callable that will be added to the server. + :param list[str] client_system_path: The list of python system paths from the client. + :param list(tuple) remap_pairs: A list of tuples with first value being the client python path root and the + second being the new server path root. This can be useful if the client and server are on two different file + systems and the root of the import paths need to be dynamically replaced. + :return str: A response message back to the client. 
+ """ + for path in client_system_path: + # if a list of remap pairs are provided, they will be remapped before being added to the system path + for client_path_root, matching_server_path_root in remap_pairs or []: + if path.startswith(client_path_root): + path = os.path.join( + matching_server_path_root, + path.replace(client_path_root, '').replace(os.sep, '/').strip('/') + ) + + if path not in sys.path: + sys.path.append(path) + + # run the function code + exec(code) + callable_instance = locals().copy().get(callable_name) + + # grab it from the locals and register it with the server + if callable_instance: + if self.is_thread: + self.server.register_function( + self.thread_safe_call(callable_instance), + callable_name + ) + else: + self.server.register_function( + callable_instance, + callable_name + ) + return f'The function "{callable_name}" has been successfully registered with the server!' + + +class BaseRPCServerThread(threading.Thread, BaseRPCServer): + def __init__(self, name, port): + """ + Initialize the base rpc server. + + :param str name: The name of the server. + :param int port: The number of the server port. + """ + threading.Thread.__init__(self, name=name, daemon=True) + BaseRPCServer.__init__(self, name, port, is_thread=True) + + def run(self): + """ + Overrides the run method. + """ + self.server.serve_forever() + + @abc.abstractmethod + def thread_safe_call(self, callable_instance, *args): + """ + Implements thread safe execution of a call. + """ + return + + +class BaseRPCServerManager: + @abc.abstractmethod + def __init__(self): + """ + Initialize the server manager. + Note: when this class is subclassed `name`, `port`, `threaded_server_class` need to be defined. + """ + self.server_thread = None + self.server_blocking = None + self._server = None + + def start_server_thread(self): + """ + Starts the server in a thread. 
+ """ + self.server_thread = self.threaded_server_class(self.name, self.port) + self._server = self.server_thread.server + self.server_thread.start() + + def start_server_blocking(self): + """ + Starts the server in the main thread, which blocks all other processes. This can only + be killed by the client. + """ + self.server_blocking = BaseRPCServer(self.name, self.port) + self._server = self.server_blocking.server + self._server.serve_until_killed() + + def start(self, threaded=True): + """ + Starts the server. + + :param bool threaded: Whether or not to start the server in a thread. If not threaded + it will block all other processes. + """ + # start the server in a thread + if threaded and not self.server_thread: + self.start_server_thread() + + # start the blocking server + elif not threaded and not self.server_blocking: + self.start_server_blocking() + + else: + logger.info(f'RPC server "{self.name}" is already running...') + + def is_running(self): + """ + Checks to see if a blocking or threaded RPC server is still running + """ + if self._server: + try: + return self._server.is_running() + except (AttributeError, RuntimeError, Exception): + return False + + return False + + def get_server(self): + """ + Returns the rpc server running. This is useful when executing in a + thread and not blocking + """ + if not self._server: + raise RuntimeError("There is no server configured for this Manager") + + return self._server + + def shutdown(self): + """ + Shuts down the server. 
+ """ + if self.server_thread: + logger.info(f'RPC server "{self.name}" is shutting down...') + + # kill the server in the thread + if self._server: + self._server.shutdown() + self._server.server_close() + + self.server_thread.join() + + logger.info(f'RPC server "{self.name}" has shutdown.') diff --git a/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/ue_utils/rpc/client.py b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/ue_utils/rpc/client.py new file mode 100644 index 0000000000..9ced182b0d --- /dev/null +++ b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/ue_utils/rpc/client.py @@ -0,0 +1,106 @@ +# Copyright Epic Games, Inc. All Rights Reserved +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. + +import os +import re +import logging +import inspect +from xmlrpc.client import ( + ServerProxy, + Unmarshaller, + Transport, + ExpatParser, + Fault, + ResponseError +) +logger = logging.getLogger(__package__) + + +class RPCUnmarshaller(Unmarshaller): + def __init__(self, *args, **kwargs): + Unmarshaller.__init__(self, *args, **kwargs) + self.error_pattern = re.compile(r'(?P[^:]*):(?P.*$)') + self.builtin_exceptions = self._get_built_in_exceptions() + + @staticmethod + def _get_built_in_exceptions(): + """ + Gets a list of the built in exception classes in python. + + :return list[BaseException] A list of the built in exception classes in python: + """ + builtin_exceptions = [] + for builtin_name, builtin_class in globals().get('__builtins__').items(): + if inspect.isclass(builtin_class) and issubclass(builtin_class, BaseException): + builtin_exceptions.append(builtin_class) + + return builtin_exceptions + + def close(self): + """ + Override so we redefine the unmarshaller. + + :return tuple: A tuple of marshallables. 
+ """ + if self._type is None or self._marks: + raise ResponseError() + + if self._type == 'fault': + marshallables = self._stack[0] + match = self.error_pattern.match(marshallables.get('faultString', '')) + if match: + exception_name = match.group('exception').strip("") + exception_message = match.group('exception_message') + + if exception_name: + for exception in self.builtin_exceptions: + if exception.__name__ == exception_name: + raise exception(exception_message) + + # if all else fails just raise the fault + raise Fault(**marshallables) + return tuple(self._stack) + + +class RPCTransport(Transport): + def getparser(self): + """ + Override so we can redefine our transport to use its own custom unmarshaller. + + :return tuple(ExpatParser, RPCUnmarshaller): The parser and unmarshaller instances. + """ + unmarshaller = RPCUnmarshaller() + parser = ExpatParser(unmarshaller) + return parser, unmarshaller + + +class RPCServerProxy(ServerProxy): + def __init__(self, *args, **kwargs): + """ + Override so we can redefine the ServerProxy to use our custom transport. + """ + kwargs['transport'] = RPCTransport() + ServerProxy.__init__(self, *args, **kwargs) + + +class RPCClient: + def __init__(self, port, marshall_exceptions=True): + """ + Initializes the rpc client. + + :param int port: A port number the client should connect to. + :param bool marshall_exceptions: Whether or not the exceptions should be marshalled. 
+ """ + if marshall_exceptions: + proxy_class = RPCServerProxy + else: + proxy_class = ServerProxy + + server_ip = os.environ.get('RPC_SERVER_IP', '127.0.0.1') + + self.proxy = proxy_class( + "http://{server_ip}:{port}".format(server_ip=server_ip, port=port), + allow_none=True, + ) + self.marshall_exceptions = marshall_exceptions + self.port = port diff --git a/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/ue_utils/rpc/exceptions.py b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/ue_utils/rpc/exceptions.py new file mode 100644 index 0000000000..0e98d15244 --- /dev/null +++ b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/ue_utils/rpc/exceptions.py @@ -0,0 +1,81 @@ +# Copyright Epic Games, Inc. All Rights Reserved +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. + +class BaseRPCException(Exception): + """ + Raised when a rpc class method is not authored as a static method. + """ + def __init__(self, message=None, line_link=''): + self.message = message + line_link + super().__init__(self.message) + + +class InvalidClassMethod(BaseRPCException): + """ + Raised when a rpc class method is not authored as a static method. + """ + def __init__(self, cls, method, message=None, line_link=''): + self.message = message + + if message is None: + self.message = ( + f'\n {cls.__name__}.{method.__name__} is not a static method. Please decorate with @staticmethod.' + ) + BaseRPCException.__init__(self, self.message, line_link) + + +class InvalidTestCasePort(BaseRPCException): + """ + Raised when a rpc test case class does not have a port defined. + """ + def __init__(self, cls, message=None, line_link=''): + self.message = message + + if message is None: + self.message = f'\n You must set {cls.__name__}.port to a supported RPC port.' 
+ BaseRPCException.__init__(self, self.message, line_link) + + +class InvalidKeyWordParameters(BaseRPCException): + """ + Raised when a rpc function has key word arguments in its parameters. + """ + def __init__(self, function, kwargs, message=None, line_link=''): + self.message = message + + if message is None: + self.message = ( + f'\n Keyword arguments "{kwargs}" were found on "{function.__name__}". The RPC client does not ' + f'support key word arguments . Please change your code to use only arguments.' + ) + BaseRPCException.__init__(self, self.message, line_link) + + +class UnsupportedArgumentType(BaseRPCException): + """ + Raised when a rpc function's argument type is not supported. + """ + def __init__(self, function, arg, supported_types, message=None, line_link=''): + self.message = message + + if message is None: + self.message = ( + f'\n "{function.__name__}" has an argument of type "{arg.__class__.__name__}". The only types that are' + f' supported by the RPC client are {[supported_type.__name__ for supported_type in supported_types]}.' + ) + BaseRPCException.__init__(self, self.message, line_link) + + +class FileNotSavedOnDisk(BaseRPCException): + """ + Raised when a rpc function is called in a context where it is not a saved file on disk. + """ + def __init__(self, function, message=None): + self.message = message + + if message is None: + self.message = ( + f'\n "{function.__name__}" is not being called from a saved file. The RPC client does not ' + f'support code that is not saved. Please save your code to a file on disk and re-run it.' 
+ ) + BaseRPCException.__init__(self, self.message) diff --git a/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/ue_utils/rpc/factory.py b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/ue_utils/rpc/factory.py new file mode 100644 index 0000000000..7063c15d6e --- /dev/null +++ b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/ue_utils/rpc/factory.py @@ -0,0 +1,252 @@ +# Copyright Epic Games, Inc. All Rights Reserved +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. + +import os +import re +import sys +import logging +import types +import inspect +import textwrap +import unittest +from xmlrpc.client import Fault + +from .client import RPCClient +from .validations import ( + validate_key_word_parameters, + validate_class_method, + get_source_file_path, + get_line_link, + validate_arguments, + validate_file_is_saved, +) + +logger = logging.getLogger(__package__) + + +class RPCFactory: + def __init__(self, rpc_client, remap_pairs=None, default_imports=None): + self.rpc_client = rpc_client + self.file_path = None + self.remap_pairs = remap_pairs + self.default_imports = default_imports or [] + + def _get_callstack_references(self, code, function): + """ + Gets all references for the given code. + + :param list[str] code: The code of the callable. + :param callable function: A callable. + :return str: The new code of the callable with all its references added. + """ + import_code = self.default_imports + + client_module = inspect.getmodule(function) + self.file_path = get_source_file_path(function) + + # if a list of remap pairs have been set, the file path will be remapped to the new server location + # Note: The is useful when the server and client are not on the same machine. 
+ server_module_path = self.file_path + for client_path_root, matching_server_path_root in self.remap_pairs or []: + if self.file_path.startswith(client_path_root): + server_module_path = os.path.join( + matching_server_path_root, + self.file_path.replace(client_path_root, '').replace(os.sep, '/').strip('/') + ) + break + + for key in dir(client_module): + for line_number, line in enumerate(code): + if line.startswith('def '): + continue + + if key in re.split('\.|\(| ', line.strip()): + if os.path.basename(self.file_path) == '__init__.py': + base_name = os.path.basename(os.path.dirname(self.file_path)) + else: + base_name = os.path.basename(self.file_path) + + module_name, file_extension = os.path.splitext(base_name) + import_code.append( + f'{module_name} = SourceFileLoader("{module_name}", r"{server_module_path}").load_module()' + ) + import_code.append(f'from {module_name} import {key}') + break + + return textwrap.indent('\n'.join(import_code), ' ' * 4) + + def _get_code(self, function): + """ + Gets the code from a callable. + + :param callable function: A callable. + :return str: The code of the callable. + """ + code = textwrap.dedent(inspect.getsource(function)).split('\n') + code = [line for line in code if not line.startswith('@')] + + # get import code and insert them inside the function + import_code = self._get_callstack_references(code, function) + code.insert(1, import_code) + + # log out the generated code + if os.environ.get('RPC_LOG_CODE'): + for line in code: + logger.debug(line) + + return code + + def _register(self, function): + """ + Registers a given callable with the server. + + :param callable function: A callable. + :return Any: The return value. + """ + code = self._get_code(function) + try: + # if additional paths are explicitly set, then use them. 
This is useful with the client is on another + # machine and the python paths are different + additional_paths = list(filter(None, os.environ.get('RPC_ADDITIONAL_PYTHON_PATHS', '').split(','))) + + if not additional_paths: + # otherwise use the current system path + additional_paths = sys.path + + response = self.rpc_client.proxy.add_new_callable( + function.__name__, '\n'.join(code), + additional_paths + ) + if os.environ.get('RPC_DEBUG'): + logger.debug(response) + + except ConnectionRefusedError: + server_name = os.environ.get(f'RPC_SERVER_{self.rpc_client.port}', self.rpc_client.port) + raise ConnectionRefusedError(f'No connection could be made with "{server_name}"') + + def run_function_remotely(self, function, args): + """ + Handles running the given function on remotely. + + :param callable function: A function reference. + :param tuple(Any) args: The function's arguments. + :return callable: A remote callable. + """ + validate_arguments(function, args) + + # get the remote function instance + self._register(function) + remote_function = getattr(self.rpc_client.proxy, function.__name__) + + current_frame = inspect.currentframe() + outer_frame_info = inspect.getouterframes(current_frame) + # step back 2 frames in the callstack + caller_frame = outer_frame_info[2][0] + # create a trace back that is relevant to the remote code rather than the code transporting it + call_traceback = types.TracebackType(None, caller_frame, caller_frame.f_lasti, caller_frame.f_lineno) + # call the remote function + if not self.rpc_client.marshall_exceptions: + # if exceptions are not marshalled then receive the default Faut + return remote_function(*args) + + # otherwise catch them and add a line link to them + try: + return remote_function(*args) + except Exception as exception: + stack_trace = str(exception) + get_line_link(function) + if isinstance(exception, Fault): + raise Fault(exception.faultCode, exception.faultString) + raise 
exception.__class__(stack_trace).with_traceback(call_traceback) + + +def remote_call(port, default_imports=None, remap_pairs=None): + """ + A decorator that makes this function run remotely. + + :param Enum port: The name of the port application i.e. maya, blender, unreal. + :param list[str] default_imports: A list of import commands that include modules in every call. + :param list(tuple) remap_pairs: A list of tuples with first value being the client file path root and the + second being the matching server path root. This can be useful if the client and server are on two different file + systems and the root of the import paths need to be dynamically replaced. + """ + def decorator(function): + def wrapper(*args, **kwargs): + validate_file_is_saved(function) + validate_key_word_parameters(function, kwargs) + rpc_factory = RPCFactory( + rpc_client=RPCClient(port), + remap_pairs=remap_pairs, + default_imports=default_imports + ) + return rpc_factory.run_function_remotely(function, args) + return wrapper + return decorator + + +def remote_class(decorator): + """ + A decorator that makes this class run remotely. + + :param remote_call decorator: The remote call decorator. + :return: A decorated class. + """ + def decorate(cls): + for attribute, value in cls.__dict__.items(): + validate_class_method(cls, value) + if callable(getattr(cls, attribute)): + setattr(cls, attribute, decorator(getattr(cls, attribute))) + return cls + return decorate + + +class RPCTestCase(unittest.TestCase): + """ + Subclasses unittest.TestCase to implement a RPC compatible TestCase. + """ + port = None + remap_pairs = None + default_imports = None + + @classmethod + def run_remotely(cls, method, args): + """ + Run the given method remotely. + + :param callable method: A method to wrap. 
+ """ + default_imports = cls.__dict__.get('default_imports', None) + port = cls.__dict__.get('port', None) + remap_pairs = cls.__dict__.get('remap_pairs', None) + rpc_factory = RPCFactory( + rpc_client=RPCClient(port), + default_imports=default_imports, + remap_pairs=remap_pairs + ) + return rpc_factory.run_function_remotely(method, args) + + def _callSetUp(self): + """ + Overrides the TestCase._callSetUp method by passing it to be run remotely. + Notice None is passed as an argument instead of self. This is because only static methods + are allowed by the RPCClient. + """ + self.run_remotely(self.setUp, [None]) + + def _callTearDown(self): + """ + Overrides the TestCase._callTearDown method by passing it to be run remotely. + Notice None is passed as an argument instead of self. This is because only static methods + are allowed by the RPCClient. + """ + # notice None is passed as an argument instead of self so self can't be used + self.run_remotely(self.tearDown, [None]) + + def _callTestMethod(self, method): + """ + Overrides the TestCase._callTestMethod method by capturing the test case method that would be run and then + passing it to be run remotely. Notice no arguments are passed. This is because only static methods + are allowed by the RPCClient. + + :param callable method: A method from the test case. + """ + self.run_remotely(method, []) diff --git a/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/ue_utils/rpc/server.py b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/ue_utils/rpc/server.py new file mode 100644 index 0000000000..6bc1451794 --- /dev/null +++ b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/ue_utils/rpc/server.py @@ -0,0 +1,32 @@ +# Copyright Epic Games, Inc. All Rights Reserved +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+ +import os +import sys +sys.path.append(os.path.dirname(__file__)) + +from base_server import BaseRPCServerManager, BaseRPCServerThread + + +class RPCServerThread(BaseRPCServerThread): + def thread_safe_call(self, callable_instance, *args): + """ + Implementation of a thread safe call in Unreal. + """ + return callable_instance(*args) + + +class RPCServer(BaseRPCServerManager): + def __init__(self, port=None): + """ + Initialize the blender rpc server, with its name and specific port. + """ + super(RPCServer, self).__init__() + self.name = 'RPCServer' + self.port = int(os.environ.get('RPC_PORT', port)) + self.threaded_server_class = RPCServerThread + + +if __name__ == '__main__': + rpc_server = RPCServer() + rpc_server.start(threaded=False) diff --git a/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/ue_utils/rpc/validations.py b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/ue_utils/rpc/validations.py new file mode 100644 index 0000000000..6e4e986ca3 --- /dev/null +++ b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/ue_utils/rpc/validations.py @@ -0,0 +1,108 @@ +# Copyright Epic Games, Inc. All Rights Reserved +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. + +import inspect + +from .exceptions import ( + InvalidClassMethod, + InvalidTestCasePort, + InvalidKeyWordParameters, + UnsupportedArgumentType, + FileNotSavedOnDisk, +) + + +def get_source_file_path(function): + """ + Gets the full path to the source code. + + :param callable function: A callable. + :return str: A file path. + """ + client_module = inspect.getmodule(function) + return client_module.__file__ + + +def get_line_link(function): + """ + Gets the line number of a function. + + :param callable function: A callable. 
+ :return int: The line number + """ + lines, line_number = inspect.getsourcelines(function) + file_path = get_source_file_path(function) + return f' File "{file_path}", line {line_number}' + + +def validate_arguments(function, args): + """ + Validates arguments to ensure they are a supported type. + + :param callable function: A function reference. + :param tuple(Any) args: A list of arguments. + """ + supported_types = [str, int, float, tuple, list, dict, bool] + line_link = get_line_link(function) + for arg in args: + if arg is None: + continue + + if type(arg) not in supported_types: + raise UnsupportedArgumentType(function, arg, supported_types, line_link=line_link) + + +def validate_test_case_class(cls): + """ + This is use to validate a subclass of RPCTestCase. While building your test + suite you can call this method on each class preemptively to validate that it + was defined correctly. + + :param RPCTestCase cls: A class. + :param str file_path: Optionally, a file path to the test case can be passed to give + further context into where the error is occurring. + """ + line_link = get_line_link(cls) + if not cls.__dict__.get('port'): + raise InvalidTestCasePort(cls, line_link=line_link) + + for attribute, method in cls.__dict__.items(): + if callable(method) and not isinstance(method, staticmethod): + if method.__name__.startswith('test'): + raise InvalidClassMethod(cls, method, line_link=line_link) + + +def validate_class_method(cls, method): + """ + Validates a method on a class. + + :param Any cls: A class. + :param callable method: A callable. + """ + if callable(method) and not isinstance(method, staticmethod): + line_link = get_line_link(method) + raise InvalidClassMethod(cls, method, line_link=line_link) + + +def validate_key_word_parameters(function, kwargs): + """ + Validates a method on a class. + + :param callable function: A callable. + :param dict kwargs: A dictionary of key word arguments. 
+    """
+    if kwargs:
+        line_link = get_line_link(function)
+        raise InvalidKeyWordParameters(function, kwargs, line_link=line_link)
+
+
+def validate_file_is_saved(function):
+    """
+    Validates that the file that the function is from is saved on disk.
+
+    :param callable function: A callable.
+    """
+    try:
+        inspect.getsourcelines(function)
+    except OSError:
+        raise FileNotSavedOnDisk(function)
diff --git a/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/ue_utils/submit_deadline_job.py b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/ue_utils/submit_deadline_job.py
new file mode 100644
index 0000000000..e5c6e0bebb
--- /dev/null
+++ b/client/ayon_deadline/repository/custom/plugins/UnrealEngine5/ue_utils/submit_deadline_job.py
@@ -0,0 +1,72 @@
+# Copyright Epic Games, Inc. All Rights Reserved
+# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+
+from System.Collections.Specialized import StringCollection
+from System.IO import StreamWriter, Path
+from System.Text import Encoding
+
+from Deadline.Scripting import ClientUtils
+
+
+def submit_job(name, job_info, plugin_info, aux_files=None):
+    """
+    Creates a job and plugin file and submits it to deadline as a job.
+    :param name: Name of the plugin
+    :param job_info: The job dictionary
+    :type job_info: dict
+    :param plugin_info: The plugin dictionary
+    :type plugin_info: dict
+    :param aux_files: The files submitted to the farm
+    :type aux_files: list
+    """
+
+    # Create a job file
+    JobInfoFilename = Path.Combine(
+        ClientUtils.GetDeadlineTempPath(),
+        "{name}_job_info.job".format(name=name),
+    )
+    # Get a job info file writer
+    writer = StreamWriter(JobInfoFilename, False, Encoding.Unicode)
+
+    for key, value in job_info.items():
+        writer.WriteLine("{key}={value}".format(key=key, value=value))
+
+    writer.Close()
+
+    # Create a plugin file
+    PluginInfoFilename = Path.Combine(
+        ClientUtils.GetDeadlineTempPath(),
+        "{name}_plugin_info.job".format(name=name),
+    )
+    # Get a plugin info
file writer
+    writer = StreamWriter(PluginInfoFilename, False, Encoding.Unicode)
+
+    for key, value in plugin_info.items():
+        writer.WriteLine("{key}={value}".format(key=key, value=value))
+
+    # Add Aux Files if any
+    if aux_files:
+        for index, aux_file in enumerate(aux_files):  # distinct name: rebinding ``aux_files`` here broke the re-iteration below
+            writer.WriteLine(
+                "File{index}={val}".format(index=index, val=aux_file)
+            )
+
+    writer.Close()
+
+    # Create the commandline arguments
+    args = StringCollection()
+
+    args.Add(JobInfoFilename)
+    args.Add(PluginInfoFilename)
+
+    # Add aux files to the plugin data
+    if aux_files:
+        for scene_file in aux_files:
+            args.Add(scene_file)
+
+
+    results = ClientUtils.ExecuteCommandAndGetOutput(args)
+
+    # TODO: Return the Job ID and results
+
+    return results
diff --git a/client/ayon_deadline/repository/readme.md b/client/ayon_deadline/repository/readme.md
index 31ffffd0b7..1efe94c63e 100644
--- a/client/ayon_deadline/repository/readme.md
+++ b/client/ayon_deadline/repository/readme.md
@@ -9,8 +9,8 @@ GlobalJobPreLoad
 -----
-The `GlobalJobPreLoad` will retrieve the OpenPype executable path from the
-`OpenPype` Deadline Plug-in's settings. Then it will call the executable to
+The `GlobalJobPreLoad` will retrieve the AYON executable path from the
+`AYON` Deadline Plug-in's settings. Then it will call the executable to
 retrieve the environment variables needed for the Deadline Job. These
 environment variables are injected into rendering process.
@@ -22,8 +22,16 @@ for old Pype2 and non-OpenPype triggered jobs.
 Plugin
 ------
- For each render and publishing job the `OpenPype` Deadline Plug-in is checked
+ For each render and publishing job the `AYON` Deadline Plug-in is checked
 for the configured location of the OpenPype executable (needs to be
 configured in `Deadline's Configure Plugins > OpenPype`) through
 `GlobalJobPreLoad`.
+
+Unreal5 Plugin
+--------------
+The whole Unreal5 plugin is copied here, as it is not possible to add only `JobPreLoad.py` and `UnrealSyncUtil.py` (which handle Perforce) to the custom folder.
+This might need to be revisited, as it creates a dependency on the official Unreal5 plugin.
+
+`JobPreLoad.py` and `UnrealSyncUtil.py` handle Perforce syncing and must be triggered before the Unreal rendering job.
+It would be better to have only these two files here, but deployment wouldn't be a straightforward copy as it is for other plugins.
diff --git a/ruff.toml b/ruff.toml
index c6b550b68e..36bd84fa29 100644
--- a/ruff.toml
+++ b/ruff.toml
@@ -28,6 +28,7 @@ exclude = [
     "venv",
     "client/ayon_deadline/repository/custom/plugins/CelAction/*",
     "client/ayon_deadline/repository/custom/plugins/HarmonyAYON/*",
+    "client/ayon_deadline/repository/custom/plugins/UnrealEngine5",
 ]
 
 # Same as Black.