diff --git a/client/ayon_maya/__init__.py b/client/ayon_maya/__init__.py new file mode 100644 index 00000000..bb940a88 --- /dev/null +++ b/client/ayon_maya/__init__.py @@ -0,0 +1,10 @@ +from .addon import ( + MayaAddon, + MAYA_ROOT_DIR, +) + + +__all__ = ( + "MayaAddon", + "MAYA_ROOT_DIR", +) diff --git a/client/ayon_maya/addon.py b/client/ayon_maya/addon.py new file mode 100644 index 00000000..1ad0fcf4 --- /dev/null +++ b/client/ayon_maya/addon.py @@ -0,0 +1,46 @@ +import os +from ayon_core.addon import AYONAddon, IHostAddon + +MAYA_ROOT_DIR = os.path.dirname(os.path.abspath(__file__)) + + +class MayaAddon(AYONAddon, IHostAddon): + name = "maya" + host_name = "maya" + + def add_implementation_envs(self, env, _app): + # Add requirements to PYTHONPATH + new_python_paths = [ + os.path.join(MAYA_ROOT_DIR, "startup") + ] + old_python_path = env.get("PYTHONPATH") or "" + for path in old_python_path.split(os.pathsep): + if not path: + continue + + norm_path = os.path.normpath(path) + if norm_path not in new_python_paths: + new_python_paths.append(norm_path) + + # add vendor path + new_python_paths.append( + os.path.join(MAYA_ROOT_DIR, "vendor", "python") + ) + env["PYTHONPATH"] = os.pathsep.join(new_python_paths) + + # Set default environments + envs = { + "AYON_LOG_NO_COLORS": "1", + } + for key, value in envs.items(): + env[key] = value + + def get_launch_hook_paths(self, app): + if app.host_name != self.host_name: + return [] + return [ + os.path.join(MAYA_ROOT_DIR, "hooks") + ] + + def get_workfile_extensions(self): + return [".ma", ".mb"] diff --git a/client/ayon_maya/api/__init__.py b/client/ayon_maya/api/__init__.py new file mode 100644 index 00000000..0948282f --- /dev/null +++ b/client/ayon_maya/api/__init__.py @@ -0,0 +1,74 @@ +"""Public API + +Anything that isn't defined here is INTERNAL and unreliable for external use. + +""" + +from .pipeline import ( + uninstall, + + ls, + containerise, + MayaHost, +) +from .plugin import ( + Creator, + Loader +) + +from .workio import ( + open_file, + save_file, + current_file, + has_unsaved_changes, + file_extensions, + work_root +) + +from .lib import ( + lsattr, + lsattrs, + read, + + apply_shaders, + maintained_selection, + suspended_refresh, + + unique_namespace, +) + + +__all__ = [ + "uninstall", + + "ls", + "containerise", + "MayaHost", + + "Creator", + "Loader", + + # Workfiles API + "open_file", + "save_file", + "current_file", + "has_unsaved_changes", + "file_extensions", + "work_root", + + # Utility functions + "lsattr", + "lsattrs", + "read", + + "unique_namespace", + + "apply_shaders", + "maintained_selection", + "suspended_refresh", + +] + +# Backwards API compatibility +open = open_file +save = save_file diff --git a/client/ayon_maya/api/action.py b/client/ayon_maya/api/action.py new file mode 100644 index 00000000..d845ac60 --- /dev/null +++ b/client/ayon_maya/api/action.py @@ -0,0 +1,146 @@ +# absolute_import is needed to counter the `module has no cmds error` in Maya +from __future__ import absolute_import + +import pyblish.api +import ayon_api + +from ayon_core.pipeline.publish import ( + get_errored_instances_from_context, + get_errored_plugins_from_context +) + + +class GenerateUUIDsOnInvalidAction(pyblish.api.Action): + """Generate UUIDs on the invalid nodes in the instance. + + Invalid nodes are those returned by the plugin's `get_invalid` method. + As such it is the plug-in's responsibility to ensure the nodes that + receive new UUIDs are actually invalid. 
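+ + Example: + A minimal sketch of a validator exposing this action. The plug-in + name and the id check below are hypothetical; the action only + needs 'actions' to list it and a 'get_invalid' returning the + offending nodes: + + >>> class ValidateNodeIdsExample(pyblish.api.InstancePlugin): + ... actions = [GenerateUUIDsOnInvalidAction] + ... @classmethod + ... def get_invalid(cls, instance): + ... return [node for node in instance + ... if not lib.get_id(node)]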
+ + Requires: + - instance.data["folderPath"] + + """ + + label = "Regenerate UUIDs" + on = "failed" # This action is only available on a failed plug-in + icon = "wrench" # Icon from Awesome Icon + + def process(self, context, plugin): + + from maya import cmds + + self.log.info("Finding bad nodes..") + + errored_instances = get_errored_instances_from_context(context) + + # Apply pyblish logic to get the instances for the plug-in + instances = pyblish.api.instances_by_plugin(errored_instances, plugin) + + # Get the nodes from the all instances that ran through this plug-in + all_invalid = [] + for instance in instances: + invalid = plugin.get_invalid(instance) + + # Don't allow referenced nodes to get their ids regenerated to + # avoid loaded content getting messed up with reference edits + if invalid: + referenced = {node for node in invalid if + cmds.referenceQuery(node, isNodeReferenced=True)} + if referenced: + self.log.warning("Skipping UUID generation on referenced " + "nodes: {}".format(list(referenced))) + invalid = [node for node in invalid + if node not in referenced] + + if invalid: + + self.log.info("Fixing instance {}".format(instance.name)) + self._update_id_attribute(instance, invalid) + + all_invalid.extend(invalid) + + if not all_invalid: + self.log.info("No invalid nodes found.") + return + + all_invalid = list(set(all_invalid)) + self.log.info("Generated ids on nodes: {0}".format(all_invalid)) + + def _update_id_attribute(self, instance, nodes): + """Delete the id attribute + + Args: + instance: The instance we're fixing for + nodes (list): all nodes to regenerate ids on + """ + + from . import lib + + # Expecting this is called on validators in which case 'folderEntity' + # should be always available, but kept a way to query it by name. + folder_entity = instance.data.get("folderEntity") + if not folder_entity: + folder_path = instance.data["folderPath"] + project_name = instance.context.data["projectName"] + self.log.info(( + "Folder is not stored on instance." + " Querying by path \"{}\" from project \"{}\"" + ).format(folder_path, project_name)) + folder_entity = ayon_api.get_folder_by_path( + project_name, folder_path, fields={"id"} + ) + + for node, _id in lib.generate_ids( + nodes, folder_id=folder_entity["id"] + ): + lib.set_id(node, _id, overwrite=True) + + +class SelectInvalidAction(pyblish.api.Action): + """Select invalid nodes in Maya when plug-in failed. + + To retrieve the invalid nodes this assumes a static `get_invalid()` + method is available on the plugin. 
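+ + Example: + An illustrative sketch only; the plug-in name and its node query + are made up: + + >>> class ValidateMeshesExample(pyblish.api.InstancePlugin): + ... actions = [SelectInvalidAction] + ... @staticmethod + ... def get_invalid(instance): + ... return cmds.ls(instance, type="mesh", long=True)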
+ + """ + label = "Select invalid" + on = "failed" # This action is only available on a failed plug-in + icon = "search" # Icon from Awesome Icon + + def process(self, context, plugin): + + try: + from maya import cmds + except ImportError: + raise ImportError("Current host is not Maya") + + # Get the invalid nodes for the plug-ins + self.log.info("Finding invalid nodes..") + invalid = list() + if issubclass(plugin, pyblish.api.ContextPlugin): + errored_plugins = get_errored_plugins_from_context(context) + if plugin in errored_plugins: + invalid = plugin.get_invalid(context) + else: + errored_instances = get_errored_instances_from_context( + context, plugin=plugin + ) + for instance in errored_instances: + invalid_nodes = plugin.get_invalid(instance) + if invalid_nodes: + if isinstance(invalid_nodes, (list, tuple)): + invalid.extend(invalid_nodes) + else: + self.log.warning("Plug-in returned to be invalid, " + "but has no selectable nodes.") + + # Ensure unique (process each node only once) + invalid = list(set(invalid)) + + if invalid: + self.log.info("Selecting invalid nodes: %s" % ", ".join(invalid)) + cmds.select(invalid, replace=True, noExpand=True) + else: + self.log.info("No invalid nodes found.") + cmds.select(deselect=True) diff --git a/client/ayon_maya/api/alembic.py b/client/ayon_maya/api/alembic.py new file mode 100644 index 00000000..007e3ce4 --- /dev/null +++ b/client/ayon_maya/api/alembic.py @@ -0,0 +1,350 @@ +import json +import logging +import os + +from maya import cmds # noqa + +from ayon_maya.api.lib import evaluation + +log = logging.getLogger(__name__) + +# The maya alembic export types +ALEMBIC_ARGS = { + "attr": (list, tuple), + "attrPrefix": (list, tuple), + "autoSubd": bool, + "dataFormat": str, + "endFrame": float, + "eulerFilter": bool, + "frameRange": str, # "start end"; overrides startFrame & endFrame + "frameRelativeSample": float, + "melPerFrameCallback": str, + "melPostJobCallback": str, + "noNormals": bool, + "preRoll": bool, + "pythonPerFrameCallback": str, + "pythonPostJobCallback": str, + "renderableOnly": bool, + "root": (list, tuple), + "selection": bool, + "startFrame": float, + "step": float, + "stripNamespaces": bool, + "userAttr": (list, tuple), + "userAttrPrefix": (list, tuple), + "uvWrite": bool, + "uvsOnly": bool, + "verbose": bool, + "wholeFrameGeo": bool, + "worldSpace": bool, + "writeColorSets": bool, + "writeCreases": bool, # Maya 2015 Ext1+ + "writeFaceSets": bool, + "writeUVSets": bool, # Maya 2017+ + "writeVisibility": bool, +} + + +def extract_alembic( + file, + attr=None, + attrPrefix=None, + dataFormat="ogawa", + endFrame=None, + eulerFilter=True, + frameRange="", + melPerFrameCallback=None, + melPostJobCallback=None, + noNormals=False, + preRoll=False, + preRollStartFrame=0, + pythonPerFrameCallback=None, + pythonPostJobCallback=None, + renderableOnly=False, + root=None, + selection=True, + startFrame=None, + step=1.0, + stripNamespaces=True, + userAttr=None, + userAttrPrefix=None, + uvsOnly=False, + uvWrite=True, + verbose=False, + wholeFrameGeo=False, + worldSpace=False, + writeColorSets=False, + writeCreases=False, + writeFaceSets=False, + writeUVSets=False, + writeVisibility=False +): + """Extract a single Alembic Cache. + + This extracts an Alembic cache using the `-selection` flag to minimize + the extracted content to solely what was Collected into the instance. + + Arguments: + file (str): The filepath to write the alembic file to. + + attr (list of str, optional): A specific geometric attribute to write + out. 
Defaults to []. + + attrPrefix (list of str, optional): Prefix filter for determining which + geometric attributes to write out. Defaults to ["ABC_"]. + + dataFormat (str): The data format to use for the cache, + defaults to "ogawa". + + endFrame (float): End frame of output. Ignored if `frameRange` + provided. + + eulerFilter (bool): When on, X, Y, and Z rotation data is filtered with + an Euler filter. Euler filtering helps resolve irregularities in + rotations especially if X, Y, and Z rotations exceed 360 degrees. + Defaults to True. + + frameRange (tuple or str): Two-tuple with start and end frame or a + string formatted as: "startFrame endFrame". This argument + overrides `startFrame` and `endFrame` arguments. + + melPerFrameCallback (Optional[str]): MEL callback run per frame. + + melPostJobCallback (Optional[str]): MEL callback after last frame is + written. + + noNormals (bool): When on, normal data from the original polygon + objects is not included in the exported Alembic cache file. + + preRoll (bool): This frame range will not be sampled. + Defaults to False. + + preRollStartFrame (float): The frame to start scene + evaluation at. This is used to set the starting frame for time + dependent translations and can be used to evaluate run-up that + isn't actually translated. Defaults to 0. + + pythonPerFrameCallback (Optional[str]): Python callback run per frame. + + pythonPostJobCallback (Optional[str]): Python callback after last frame + is written. + + renderableOnly (bool): When on, any non-renderable nodes or hierarchy, + such as hidden objects, are not included in the Alembic file. + Defaults to False. + + root (list of str): Maya dag path which will be parented to + the root of the Alembic file. Defaults to [], which means the + entire scene will be written out. + + selection (bool): Write out all selected nodes from the + active selection list that are descendants of the roots specified + with -root. Defaults to True. + + startFrame (float): Start frame of output. Ignored if `frameRange` + provided. + + step (float): The time interval (expressed in frames) at + which the frame range is sampled. Additional samples around each + frame can be specified with -frs. Defaults to 1.0. + + stripNamespaces (bool): When on, any namespaces associated with the + exported objects are removed from the Alembic file. For example, an + object with the namespace taco:foo:bar appears as bar in the + Alembic file. + + userAttr (list of str, optional): A specific user defined attribute to + write out. Defaults to []. + + userAttrPrefix (list of str, optional): Prefix filter for determining + which user defined attributes to write out. Defaults to []. + + uvsOnly (bool): When on, only uv data for PolyMesh and SubD shapes + will be written to the Alembic file. + + uvWrite (bool): When on, UV data from polygon meshes and subdivision + objects are written to the Alembic file. Only the current UV map is + included. + + verbose (bool): When on, outputs frame number information to the + Script Editor or output window during extraction. + + wholeFrameGeo (bool): Data for geometry will only be written + out on whole frames. Defaults to False. + + worldSpace (bool): When on, the top node in the node hierarchy is + stored as world space. By default, these nodes are stored as local + space. Defaults to False. + + writeColorSets (bool): Write all color sets on MFnMeshes as + color 3 or color 4 indexed geometry parameters with face varying + scope. Defaults to False.
+ + writeCreases (bool): If the mesh has crease edges or crease + vertices, the mesh (OPolyMesh) would now be written out as an OSubD + and crease info will be stored in the Alembic file. Otherwise, + creases info won't be preserved in Alembic file unless a custom + Boolean attribute SubDivisionMesh has been added to mesh node and + its value is true. Defaults to False. + + writeFaceSets (bool): Write all Face sets on MFnMeshes. + Defaults to False. + + writeUVSets (bool): Write all uv sets on MFnMeshes as vector + 2 indexed geometry parameters with face varying scope. Defaults to + False. + + writeVisibility (bool): Visibility state will be stored in + the Alembic file. Otherwise everything written out is treated as + visible. Defaults to False. + """ + + # Ensure alembic exporter is loaded + cmds.loadPlugin('AbcExport', quiet=True) + + # Alembic Exporter requires forward slashes + file = file.replace('\\', '/') + + # Ensure list arguments are valid. + attr = attr or [] + attrPrefix = attrPrefix or [] + userAttr = userAttr or [] + userAttrPrefix = userAttrPrefix or [] + root = root or [] + + # Pass the start and end frame on as `frameRange` so that it + # never conflicts with that argument + if not frameRange: + # Fallback to maya timeline if no start or end frame provided. + if startFrame is None: + startFrame = cmds.playbackOptions(query=True, + animationStartTime=True) + if endFrame is None: + endFrame = cmds.playbackOptions(query=True, + animationEndTime=True) + + # Ensure valid types are converted to frame range + assert isinstance(startFrame, ALEMBIC_ARGS["startFrame"]) + assert isinstance(endFrame, ALEMBIC_ARGS["endFrame"]) + frameRange = "{0} {1}".format(startFrame, endFrame) + else: + # Allow conversion from tuple for `frameRange` + if isinstance(frameRange, (list, tuple)): + assert len(frameRange) == 2 + frameRange = "{0} {1}".format(frameRange[0], frameRange[1]) + + # Assemble options + options = { + "selection": selection, + "frameRange": frameRange, + "eulerFilter": eulerFilter, + "noNormals": noNormals, + "preRoll": preRoll, + "root": root, + "renderableOnly": renderableOnly, + "uvWrite": uvWrite, + "uvsOnly": uvsOnly, + "writeColorSets": writeColorSets, + "writeFaceSets": writeFaceSets, + "wholeFrameGeo": wholeFrameGeo, + "worldSpace": worldSpace, + "writeVisibility": writeVisibility, + "writeUVSets": writeUVSets, + "writeCreases": writeCreases, + "dataFormat": dataFormat, + "step": step, + "attr": attr, + "attrPrefix": attrPrefix, + "userAttr": userAttr, + "userAttrPrefix": userAttrPrefix, + "stripNamespaces": stripNamespaces, + "verbose": verbose + } + + # Validate options + for key, value in options.copy().items(): + + # Discard unknown options + if key not in ALEMBIC_ARGS: + log.warning("extract_alembic() does not support option '%s'. 
" + "Flag will be ignored..", key) + options.pop(key) + continue + + # Validate value type + valid_types = ALEMBIC_ARGS[key] + if not isinstance(value, valid_types): + raise TypeError("Alembic option unsupported type: " + "{0} (expected {1})".format(value, valid_types)) + + # Ignore empty values, like an empty string, since they mess up how + # job arguments are built + if isinstance(value, (list, tuple)): + value = [x for x in value if x.strip()] + + # Ignore option completely if no values remaining + if not value: + options.pop(key) + continue + + options[key] = value + + # The `writeCreases` argument was changed to `autoSubd` in Maya 2018+ + maya_version = int(cmds.about(version=True)) + if maya_version >= 2018: + options['autoSubd'] = options.pop('writeCreases', False) + + # Only add callbacks if they are set so that we're not passing `None` + callbacks = { + "melPerFrameCallback": melPerFrameCallback, + "melPostJobCallback": melPostJobCallback, + "pythonPerFrameCallback": pythonPerFrameCallback, + "pythonPostJobCallback": pythonPostJobCallback, + } + for key, callback in callbacks.items(): + if callback: + options[key] = str(callback) + + # Format the job string from options + job_args = list() + for key, value in options.items(): + if isinstance(value, (list, tuple)): + for entry in value: + job_args.append("-{} {}".format(key, entry)) + elif isinstance(value, bool): + # Add only when state is set to True + if value: + job_args.append("-{0}".format(key)) + else: + job_args.append("-{0} {1}".format(key, value)) + + job_str = " ".join(job_args) + job_str += ' -file "%s"' % file + + # Ensure output directory exists + parent_dir = os.path.dirname(file) + if not os.path.exists(parent_dir): + os.makedirs(parent_dir) + + if verbose: + log.debug("Preparing Alembic export with options: %s", + json.dumps(options, indent=4)) + log.debug("Extracting Alembic with job arguments: %s", job_str) + + # Perform extraction + print("Alembic Job Arguments : {}".format(job_str)) + + # Disable the parallel evaluation temporarily to ensure no buggy + # exports are made. (PLN-31) + # TODO: Make sure this actually fixes the issues + with evaluation("off"): + cmds.AbcExport( + j=job_str, + verbose=verbose, + preRollStartFrame=preRollStartFrame + ) + + if verbose: + log.debug("Extracted Alembic to: %s", file) + + return file diff --git a/client/ayon_maya/api/commands.py b/client/ayon_maya/api/commands.py new file mode 100644 index 00000000..22cf0871 --- /dev/null +++ b/client/ayon_maya/api/commands.py @@ -0,0 +1,118 @@ +# -*- coding: utf-8 -*- +"""AYON script commands to be used directly in Maya.""" +from maya import cmds + +from ayon_api import get_project, get_folder_by_path + +from ayon_core.pipeline import get_current_project_name, get_current_folder_path + + +class ToolWindows: + + _windows = {} + + @classmethod + def get_window(cls, tool): + """Get widget for specific tool. + + Args: + tool (str): Name of the tool. + + Returns: + Stored widget. + + """ + try: + return cls._windows[tool] + except KeyError: + return None + + @classmethod + def set_window(cls, tool, window): + """Set widget for the tool. + + Args: + tool (str): Name of the tool. + window (QtWidgets.QWidget): Widget + + """ + cls._windows[tool] = window + + +def _resolution_from_entity(entity): + if not entity: + print("Entered entity is not valid. 
\"{}\"".format( + str(entity) + )) + return None + + attributes = entity.get("attrib") + if attributes is None: + attributes = entity.get("data", {}) + + resolution_width = attributes.get("resolutionWidth") + resolution_height = attributes.get("resolutionHeight") + # Backwards compatibility + if resolution_width is None or resolution_height is None: + resolution_width = attributes.get("resolution_width") + resolution_height = attributes.get("resolution_height") + + # Make sure both width and height are set + if resolution_width is None or resolution_height is None: + cmds.warning( + "No resolution information found for \"{}\"".format( + entity["name"] + ) + ) + return None + + return int(resolution_width), int(resolution_height) + + +def reset_resolution(): + # Default values + resolution_width = 1920 + resolution_height = 1080 + + # Get resolution from folder + project_name = get_current_project_name() + folder_path = get_current_folder_path() + folder_entity = get_folder_by_path(project_name, folder_path) + resolution = _resolution_from_entity(folder_entity) + # Try get resolution from project + if resolution is None: + # TODO go through visualParents + print(( + "Folder '{}' does not have set resolution." + " Trying to get resolution from project" + ).format(folder_path)) + project_entity = get_project(project_name) + resolution = _resolution_from_entity(project_entity) + + if resolution is None: + msg = "Using default resolution {}x{}" + else: + resolution_width, resolution_height = resolution + msg = "Setting resolution to {}x{}" + + print(msg.format(resolution_width, resolution_height)) + + # set for different renderers + # arnold, vray, redshift, renderman + + renderer = cmds.getAttr("defaultRenderGlobals.currentRenderer").lower() + # handle various renderman names + if renderer.startswith("renderman"): + renderer = "renderman" + + # default attributes are usable for Arnold, Renderman and Redshift + width_attr_name = "defaultResolution.width" + height_attr_name = "defaultResolution.height" + + # Vray has its own way + if renderer == "vray": + width_attr_name = "vraySettings.width" + height_attr_name = "vraySettings.height" + + cmds.setAttr(width_attr_name, resolution_width) + cmds.setAttr(height_attr_name, resolution_height) diff --git a/client/ayon_maya/api/customize.py b/client/ayon_maya/api/customize.py new file mode 100644 index 00000000..16255f69 --- /dev/null +++ b/client/ayon_maya/api/customize.py @@ -0,0 +1,179 @@ +"""A set of commands that install overrides to Maya's UI""" + +import os +import logging + +from functools import partial + +import maya.cmds as cmds +import maya.mel as mel + +from ayon_core import resources +from ayon_core.tools.utils import host_tools +from .lib import get_main_window +from ..tools import show_look_assigner + +log = logging.getLogger(__name__) + +COMPONENT_MASK_ORIGINAL = {} + + +def override_component_mask_commands(): + """Override component mask ctrl+click behavior. + + This implements special behavior for Maya's component + mask menu items where a ctrl+click will instantly make + it an isolated behavior disabling all others. 
+ + Tested in Maya 2016 and 2018 + + """ + log.info("Installing override_component_mask_commands..") + + # Get all object mask buttons + buttons = cmds.formLayout("objectMaskIcons", + query=True, + childArray=True) + # Skip the triangle list item + buttons = [btn for btn in buttons if btn != "objPickMenuLayout"] + + def on_changed_callback(raw_command, state): + """New callback""" + + # If "control" is held force the toggled one to on and + # toggle the others based on whether any of the buttons + # was remaining active after the toggle, if not then + # enable all + if cmds.getModifiers() == 4: # = CTRL + state = True + active = [cmds.iconTextCheckBox(btn, query=True, value=True) + for btn in buttons] + if any(active): + cmds.selectType(allObjects=False) + else: + cmds.selectType(allObjects=True) + + # Replace #1 with the current button state + cmd = raw_command.replace(" #1", " {}".format(int(state))) + mel.eval(cmd) + + for btn in buttons: + + # Store a reference to the original command so that if + # we rerun this override command it doesn't recursively + # try to implement the fix. (This also allows us to + # "uninstall" the behavior later) + if btn not in COMPONENT_MASK_ORIGINAL: + original = cmds.iconTextCheckBox(btn, query=True, cc=True) + COMPONENT_MASK_ORIGINAL[btn] = original + + # Assign the special callback + original = COMPONENT_MASK_ORIGINAL[btn] + new_fn = partial(on_changed_callback, original) + cmds.iconTextCheckBox(btn, edit=True, cc=new_fn) + + +def override_toolbox_ui(): + """Add custom buttons in Toolbox as replacement for Maya web help icon.""" + icons = resources.get_resource("icons") + parent_widget = get_main_window() + + # Ensure the maya web icon on toolbox exists + button_names = [ + # Maya 2022.1+ with maya.cmds.iconTextStaticLabel + "ToolBox|MainToolboxLayout|mayaHomeToolboxButton", + # Older with maya.cmds.iconTextButton + "ToolBox|MainToolboxLayout|mayaWebButton" + ] + for name in button_names: + if cmds.control(name, query=True, exists=True): + web_button = name + break + else: + # Button does not exist + log.warning("Can't find Maya Home/Web button to override toolbox ui..") + return + + cmds.control(web_button, edit=True, visible=False) + + # real = 32, but 36 with padding - according to toolbox mel script + icon_size = 36 + parent = web_button.rsplit("|", 1)[0] + + # Ensure the parent is a formLayout + if not cmds.objectTypeUI(parent) == "formLayout": + return + + # Create our controls + controls = [] + + controls.append( + cmds.iconTextButton( + "ayon_toolbox_lookmanager", + annotation="Look Manager", + label="Look Manager", + image=os.path.join(icons, "lookmanager.png"), + command=lambda: show_look_assigner( + parent=parent_widget + ), + width=icon_size, + height=icon_size, + parent=parent + ) + ) + + controls.append( + cmds.iconTextButton( + "ayon_toolbox_workfiles", + annotation="Work Files", + label="Work Files", + image=os.path.join(icons, "workfiles.png"), + command=lambda: host_tools.show_workfiles( + parent=parent_widget + ), + width=icon_size, + height=icon_size, + parent=parent + ) + ) + + controls.append( + cmds.iconTextButton( + "ayon_toolbox_loader", + annotation="Loader", + label="Loader", + image=os.path.join(icons, "loader.png"), + command=lambda: host_tools.show_loader( + parent=parent_widget, use_context=True + ), + width=icon_size, + height=icon_size, + parent=parent + ) + ) + + controls.append( + cmds.iconTextButton( + "ayon_toolbox_manager", + annotation="Inventory", + label="Inventory", + image=os.path.join(icons, "inventory.png"), 
+ command=lambda: host_tools.show_scene_inventory( + parent=parent_widget + ), + width=icon_size, + height=icon_size, + parent=parent + ) + ) + + # Add the buttons on the bottom and stack + # them above each other with side padding + controls.reverse() + for i, control in enumerate(controls): + previous = controls[i - 1] if i > 0 else web_button + + cmds.formLayout(parent, edit=True, + attachControl=[control, "bottom", 0, previous], + attachForm=([control, "left", 1], + [control, "right", 1])) diff --git a/client/ayon_maya/api/exitstack.py b/client/ayon_maya/api/exitstack.py new file mode 100644 index 00000000..c35724e8 --- /dev/null +++ b/client/ayon_maya/api/exitstack.py @@ -0,0 +1,139 @@ +"""Backwards compatible implementation of ExitStack for Python 2. + +ExitStack contextmanager was added in Python 3.3. +As long as we support Python 2 hosts we can use this backwards +compatible implementation to support both Python 2 and Python 3. + +Instead of using ExitStack from contextlib, use it from this module: + +>>> from ayon_maya.api.exitstack import ExitStack + +It will provide the appropriate ExitStack implementation for the current +running Python version. + +""" +# TODO: Remove the entire script once dropping Python 2 support. +import contextlib + +# The Python 2-only `contextlib.nested` tells us whether the standard +# library already ships ExitStack (Python 3.3+). +if not getattr(contextlib, "nested", None): + # Python 3: use the standard library implementation + from contextlib import ExitStack # noqa +else: + import sys + from collections import deque + + class ExitStack(object): + + """Context manager for dynamic management of a stack of exit callbacks + + For example: + + with ExitStack() as stack: + files = [stack.enter_context(open(fname)) + for fname in filenames] + # All opened files will automatically be closed at the end of + # the with statement, even if attempts to open files later + # in the list raise an exception + + """ + def __init__(self): + self._exit_callbacks = deque() + + def pop_all(self): + """Preserve the context stack by transferring + it to a new instance""" + new_stack = type(self)() + new_stack._exit_callbacks = self._exit_callbacks + self._exit_callbacks = deque() + return new_stack + + def _push_cm_exit(self, cm, cm_exit): + """Helper to correctly register callbacks + to __exit__ methods""" + def _exit_wrapper(*exc_details): + return cm_exit(cm, *exc_details) + _exit_wrapper.__self__ = cm + self.push(_exit_wrapper) + + def push(self, exit): + """Registers a callback with the standard __exit__ method signature + + Can suppress exceptions the same way __exit__ methods can. + + Also accepts any object with an __exit__ method (registering a call + to the method instead of the object itself) + """ + # We use an unbound method rather than a bound method to follow + # the standard lookup behaviour for special methods + _cb_type = type(exit) + try: + exit_method = _cb_type.__exit__ + except AttributeError: + # Not a context manager, so assume it's a callable + self._exit_callbacks.append(exit) + else: + self._push_cm_exit(exit, exit_method) + return exit # Allow use as a decorator + + def callback(self, callback, *args, **kwds): + """Registers an arbitrary callback and arguments. + + Cannot suppress exceptions.
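+ + Example: + A small illustrative sketch; the message is arbitrary: + + >>> with ExitStack() as stack: + ... stack.callback(print, "runs when the stack unwinds")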
+ """ + def _exit_wrapper(exc_type, exc, tb): + callback(*args, **kwds) + # We changed the signature, so using @wraps is not appropriate, but + # setting __wrapped__ may still help with introspection + _exit_wrapper.__wrapped__ = callback + self.push(_exit_wrapper) + return callback # Allow use as a decorator + + def enter_context(self, cm): + """Enters the supplied context manager + + If successful, also pushes its __exit__ method as a callback and + returns the result of the __enter__ method. + """ + # We look up the special methods on the type to + # match the with statement + _cm_type = type(cm) + _exit = _cm_type.__exit__ + result = _cm_type.__enter__(cm) + self._push_cm_exit(cm, _exit) + return result + + def close(self): + """Immediately unwind the context stack""" + self.__exit__(None, None, None) + + def __enter__(self): + return self + + def __exit__(self, *exc_details): + # We manipulate the exception state so it behaves as though + # we were actually nesting multiple with statements + frame_exc = sys.exc_info()[1] + + def _fix_exception_context(new_exc, old_exc): + while 1: + exc_context = new_exc.__context__ + if exc_context in (None, frame_exc): + break + new_exc = exc_context + new_exc.__context__ = old_exc + + # Callbacks are invoked in LIFO order to match the behaviour of + # nested context managers + suppressed_exc = False + while self._exit_callbacks: + cb = self._exit_callbacks.pop() + try: + if cb(*exc_details): + suppressed_exc = True + exc_details = (None, None, None) + except Exception: + new_exc_details = sys.exc_info() + # simulate the stack of exceptions by setting the context + _fix_exception_context(new_exc_details[1], exc_details[1]) + if not self._exit_callbacks: + raise + exc_details = new_exc_details + return suppressed_exc diff --git a/client/ayon_maya/api/fbx.py b/client/ayon_maya/api/fbx.py new file mode 100644 index 00000000..28a40585 --- /dev/null +++ b/client/ayon_maya/api/fbx.py @@ -0,0 +1,210 @@ +# -*- coding: utf-8 -*- +"""Tools to work with FBX.""" +import logging + +from maya import cmds # noqa +import maya.mel as mel # noqa +from ayon_maya.api.lib import maintained_selection + + +class FBXExtractor: + """Extract FBX from Maya. + + This extracts reproducible FBX exports ignoring any of the settings set + on the local machine in the FBX export options window. + + All export settings are applied with the `FBXExport*` commands prior + to the `FBXExport` call itself. The options can be overridden with + their + nice names as seen in the "options" property on this class. + + For more information on FBX exports see: + - https://knowledge.autodesk.com/support/maya/learn-explore/caas + /CloudHelp/cloudhelp/2016/ENU/Maya/files/GUID-6CCE943A-2ED4-4CEE-96D4 + -9CB19C28F4E0-htm.html + - http://forums.cgsociety.org/archive/index.php?t-1032853.html + - https://groups.google.com/forum/#!msg/python_inside_maya/cLkaSo361oE + /LKs9hakE28kJ + + """ + @property + def options(self): + """Overridable options for FBX Export + + Given in the following format + - {NAME: EXPECTED TYPE} + + If the overridden option's type does not match, + the option is not included and a warning is logged. 
+ + """ + + return { + "cameras": bool, + "smoothingGroups": bool, + "hardEdges": bool, + "tangents": bool, + "smoothMesh": bool, + "instances": bool, + # "referencedContainersContent": bool, # deprecated in Maya 2016+ + "bakeComplexAnimation": bool, + "bakeComplexStart": int, + "bakeComplexEnd": int, + "bakeComplexStep": int, + "bakeResampleAnimation": bool, + "useSceneName": bool, + "quaternion": str, # "euler" + "shapes": bool, + "skins": bool, + "constraints": bool, + "lights": bool, + "embeddedTextures": bool, + "includeChildren": bool, + "inputConnections": bool, + "upAxis": str, # x, y or z, + "triangulate": bool, + "fileVersion": str, + "skeletonDefinitions": bool, + "referencedAssetsContent": bool + } + + @property + def default_options(self): + """The default options for FBX extraction. + + This includes shapes, skins, constraints, lights and incoming + connections and exports with the Y-axis as up-axis. + + By default this uses the time sliders start and end time. + + """ + + start_frame = int(cmds.playbackOptions(query=True, + animationStartTime=True)) + end_frame = int(cmds.playbackOptions(query=True, + animationEndTime=True)) + + return { + "cameras": False, + "smoothingGroups": True, + "hardEdges": False, + "tangents": False, + "smoothMesh": True, + "instances": False, + "bakeComplexAnimation": True, + "bakeComplexStart": start_frame, + "bakeComplexEnd": end_frame, + "bakeComplexStep": 1, + "bakeResampleAnimation": True, + "useSceneName": False, + "quaternion": "euler", + "shapes": True, + "skins": True, + "constraints": False, + "lights": True, + "embeddedTextures": False, + "includeChildren": True, + "inputConnections": True, + "upAxis": "y", + "triangulate": False, + "fileVersion": "FBX202000", + "skeletonDefinitions": False, + "referencedAssetsContent": False + } + + def __init__(self, log=None): + # Ensure FBX plug-in is loaded + self.log = log or logging.getLogger(self.__class__.__name__) + cmds.loadPlugin("fbxmaya", quiet=True) + + def parse_overrides(self, instance, options): + """Inspect data of instance to determine overridden options + + An instance may supply any of the overridable options + as data, the option is then added to the extraction. + + """ + + for key in instance.data: + if key not in self.options: + continue + + # Ensure the data is of correct type + value = instance.data[key] + if not isinstance(value, self.options[key]): + self.log.warning( + "Overridden attribute {key} was of " + "the wrong type: {invalid_type} " + "- should have been {valid_type}".format( + key=key, + invalid_type=type(value).__name__, + valid_type=self.options[key].__name__)) + continue + + options[key] = value + + return options + + def set_options_from_instance(self, instance): + """Sets FBX export options from data in the instance. + + Args: + instance (Instance): Instance data. 
+ + """ + # Parse export options + options = self.default_options + options = self.parse_overrides(instance, options) + self.log.debug("Export options: {0}".format(options)) + + # Collect the start and end including handles + start = instance.data.get("frameStartHandle") or \ + instance.context.data.get("frameStartHandle") + end = instance.data.get("frameEndHandle") or \ + instance.context.data.get("frameEndHandle") + + options['bakeComplexStart'] = start + options['bakeComplexEnd'] = end + + # First apply the default export settings to be fully consistent + # each time for successive publishes + mel.eval("FBXResetExport") + + # Apply the FBX overrides through MEL since the commands + # only work correctly in MEL according to online + # available discussions on the topic + _iteritems = getattr(options, "iteritems", options.items) + for option, value in _iteritems(): + key = option[0].upper() + option[1:] # uppercase first letter + + # Boolean must be passed as lower-case strings + # as to MEL standards + if isinstance(value, bool): + value = str(value).lower() + + template = "FBXExport{0} {1}" if key == "UpAxis" else \ + "FBXExport{0} -v {1}" # noqa + cmd = template.format(key, value) + self.log.debug(cmd) + mel.eval(cmd) + + # Never show the UI or generate a log + mel.eval("FBXExportShowUI -v false") + mel.eval("FBXExportGenerateLog -v false") + + @staticmethod + def export(members, path): + # type: (list, str) -> None + """Export members as FBX with given path. + + Args: + members (list): List of members to export. + path (str): Path to use for export. + + """ + # The export requires forward slashes because we need + # to format it into a string in a mel expression + path = path.replace("\\", "/") + with maintained_selection(): + cmds.select(members, r=True, noExpand=True) + mel.eval('FBXExport -f "{}" -s'.format(path)) diff --git a/client/ayon_maya/api/gltf.py b/client/ayon_maya/api/gltf.py new file mode 100644 index 00000000..9aa4bf37 --- /dev/null +++ b/client/ayon_maya/api/gltf.py @@ -0,0 +1,88 @@ +# -*- coding: utf-8 -*- +"""Tools to work with GLTF.""" +import logging + +from maya import cmds, mel # noqa + +log = logging.getLogger(__name__) + +_gltf_options = { + "of": str, # outputFolder + "cpr": str, # copyright + "sno": bool, # selectedNodeOnly + "sn": str, # sceneName + "glb": bool, # binary + "nbu": bool, # niceBufferURIs + "hbu": bool, # hashBufferURI + "ext": bool, # externalTextures + "ivt": int, # initialValuesTime + "acn": str, # animationClipName # codespell:ignore acn + "ast": int, # animationClipStartTime + "aet": int, # animationClipEndTime + "afr": float, # animationClipFrameRate + "dsa": int, # detectStepAnimations + "mpa": str, # meshPrimitiveAttributes + "bpa": str, # blendPrimitiveAttributes + "i32": bool, # force32bitIndices + "ssm": bool, # skipStandardMaterials + "eut": bool, # excludeUnusedTexcoord + "dm": bool, # defaultMaterial + "cm": bool, # colorizeMaterials + "dmy": str, # dumpMaya + "dgl": str, # dumpGLTF + "imd": str, # ignoreMeshDeformers + "ssc": bool, # skipSkinClusters + "sbs": bool, # skipBlendShapes + "rvp": bool, # redrawViewport + "vno": bool # visibleNodesOnly +} + + +def extract_gltf(parent_dir, + filename, + **kwargs): + + """Sets GLTF export options from data in the instance. 
+ + """ + + cmds.loadPlugin('maya2glTF', quiet=True) + # load the UI to run mel command + mel.eval("maya2glTF_UI()") + + parent_dir = parent_dir.replace('\\', '/') + options = { + "dsa": 1, + "glb": True + } + options.update(kwargs) + + for key, value in options.copy().items(): + if key not in _gltf_options: + log.warning("extract_gltf() does not support option '%s'. " + "Flag will be ignored..", key) + options.pop(key) + options.pop(value) + continue + + job_args = list() + default_opt = "maya2glTF -of \"{0}\" -sn \"{1}\"".format(parent_dir, filename) # noqa + job_args.append(default_opt) + + for key, value in options.items(): + if isinstance(value, str): + job_args.append("-{0} \"{1}\"".format(key, value)) + elif isinstance(value, bool): + if value: + job_args.append("-{0}".format(key)) + else: + job_args.append("-{0} {1}".format(key, value)) + + job_str = " ".join(job_args) + log.info("{}".format(job_str)) + mel.eval(job_str) + + # close the gltf export after finish the export + gltf_UI = "maya2glTF_exporter_window" + if cmds.window(gltf_UI, q=True, exists=True): + cmds.deleteUI(gltf_UI) diff --git a/client/ayon_maya/api/lib.py b/client/ayon_maya/api/lib.py new file mode 100644 index 00000000..2b41ffc0 --- /dev/null +++ b/client/ayon_maya/api/lib.py @@ -0,0 +1,4234 @@ +"""Standalone helper functions""" + +import os +import copy +from pprint import pformat +import sys +import uuid +import re + +import json +import logging +import contextlib +import capture +from .exitstack import ExitStack +from collections import OrderedDict, defaultdict +from math import ceil +from six import string_types + +from maya import cmds, mel +from maya.api import OpenMaya + +import ayon_api + +from ayon_core.settings import get_project_settings +from ayon_core.pipeline import ( + get_current_project_name, + get_current_folder_path, + get_current_task_name, + discover_loader_plugins, + loaders_from_representation, + get_representation_path, + load_container, + registered_host, + AVALON_CONTAINER_ID, + AVALON_INSTANCE_ID, + AYON_INSTANCE_ID, + AYON_CONTAINER_ID, +) +from ayon_core.lib import NumberDef +from ayon_core.pipeline.context_tools import get_current_task_entity +from ayon_core.pipeline.create import CreateContext +from ayon_core.lib.profiles_filtering import filter_profiles + + +self = sys.modules[__name__] +self._parent = None + +log = logging.getLogger(__name__) + +IS_HEADLESS = not hasattr(cmds, "about") or cmds.about(batch=True) +ATTRIBUTE_DICT = {"int": {"attributeType": "long"}, + "str": {"dataType": "string"}, + "unicode": {"dataType": "string"}, + "float": {"attributeType": "double"}, + "bool": {"attributeType": "bool"}} + +SHAPE_ATTRS = {"castsShadows", + "receiveShadows", + "motionBlur", + "primaryVisibility", + "smoothShading", + "visibleInReflections", + "visibleInRefractions", + "doubleSided", + "opposite"} + + +DEFAULT_MATRIX = [1.0, 0.0, 0.0, 0.0, + 0.0, 1.0, 0.0, 0.0, + 0.0, 0.0, 1.0, 0.0, + 0.0, 0.0, 0.0, 1.0] + +INT_FPS = {15, 24, 25, 30, 48, 50, 60, 44100, 48000} +FLOAT_FPS = {23.98, 23.976, 29.97, 47.952, 59.94} + + +DISPLAY_LIGHTS_ENUM = [ + {"label": "Use Project Settings", "value": "project_settings"}, + {"label": "Default Lighting", "value": "default"}, + {"label": "All Lights", "value": "all"}, + {"label": "Selected Lights", "value": "selected"}, + {"label": "Flat Lighting", "value": "flat"}, + {"label": "No Lights", "value": "none"} +] + + +def get_main_window(): + """Acquire Maya's main window""" + from qtpy import QtWidgets + + if self._parent is None: + self._parent = { + 
widget.objectName(): widget + for widget in QtWidgets.QApplication.topLevelWidgets() + }["MayaWindow"] + return self._parent + + +@contextlib.contextmanager +def suspended_refresh(suspend=True): + """Suspend viewport refreshes + + cmds.ogs(pause=True) is a toggle so we can't pass False. + """ + if IS_HEADLESS: + yield + return + + original_state = cmds.ogs(query=True, pause=True) + try: + if suspend and not original_state: + cmds.ogs(pause=True) + yield + finally: + if suspend and not original_state: + cmds.ogs(pause=True) + + +@contextlib.contextmanager +def maintained_selection(): + """Maintain selection during context + + Example: + >>> scene = cmds.file(new=True, force=True) + >>> node = cmds.createNode("transform", name="Test") + >>> cmds.select("persp") + >>> with maintained_selection(): + ... cmds.select("Test", replace=True) + >>> "Test" in cmds.ls(selection=True) + False + + """ + + previous_selection = cmds.ls(selection=True) + try: + yield + finally: + if previous_selection: + cmds.select(previous_selection, + replace=True, + noExpand=True) + else: + cmds.select(clear=True) + + +def reload_all_udim_tile_previews(): + """Regenerate all UDIM tile preview in texture file""" + for texture_file in cmds.ls(type="file"): + if cmds.getAttr("{}.uvTilingMode".format(texture_file)) > 0: + cmds.ogs(regenerateUVTilePreview=texture_file) + + +@contextlib.contextmanager +def panel_camera(panel, camera): + """Set modelPanel's camera during the context. + + Arguments: + panel (str): modelPanel name. + camera (str): camera name. + + """ + original_camera = cmds.modelPanel(panel, query=True, camera=True) + try: + cmds.modelPanel(panel, edit=True, camera=camera) + yield + finally: + cmds.modelPanel(panel, edit=True, camera=original_camera) + + +def render_capture_preset(preset): + """Capture playblast with a preset. + + To generate the preset use `generate_capture_preset`. + + Args: + preset (dict): preset options + + Returns: + str: Output path of `capture.capture` + """ + + # Force a refresh at the start of the timeline + # TODO (Question): Why do we need to do this? What bug does it solve? + # Is this for simulations? 
+ cmds.refresh(force=True) + refresh_frame_int = int(cmds.playbackOptions(query=True, minTime=True)) + cmds.currentTime(refresh_frame_int - 1, edit=True) + cmds.currentTime(refresh_frame_int, edit=True) + log.debug( + "Using preset: {}".format( + json.dumps(preset, indent=4, sort_keys=True) + ) + ) + preset = copy.deepcopy(preset) + # not supported by `capture` so we pop it off of the preset + reload_textures = preset["viewport_options"].pop("loadTextures", False) + panel = preset.pop("panel") + with ExitStack() as stack: + stack.enter_context(maintained_time()) + stack.enter_context(panel_camera(panel, preset["camera"])) + stack.enter_context(viewport_default_options(panel, preset)) + if reload_textures: + # Force immediate texture loading when to ensure + # all textures have loaded before the playblast starts + stack.enter_context(material_loading_mode(mode="immediate")) + # Regenerate all UDIM tiles previews + reload_all_udim_tile_previews() + path = capture.capture(log=self.log, **preset) + + return path + + +def generate_capture_preset(instance, camera, path, + start=None, end=None, capture_preset=None): + """Function for getting all the data of preset options for + playblast capturing + + Args: + instance (pyblish.api.Instance): instance + camera (str): review camera + path (str): filepath + start (int): frameStart + end (int): frameEnd + capture_preset (dict): capture preset + + Returns: + dict: Resulting preset + """ + preset = load_capture_preset(data=capture_preset) + + preset["camera"] = camera + preset["start_frame"] = start + preset["end_frame"] = end + preset["filename"] = path + preset["overwrite"] = True + preset["panel"] = instance.data["panel"] + + # Disable viewer since we use the rendering logic for publishing + # We don't want to open the generated playblast in a viewer directly. + preset["viewer"] = False + + # "isolate_view" will already have been applied at creation, so we'll + # ignore it here. + preset.pop("isolate_view") + + # Set resolution variables from capture presets + width_preset = capture_preset["Resolution"]["width"] + height_preset = capture_preset["Resolution"]["height"] + + # Set resolution variables from folder values + folder_attributes = instance.data["folderEntity"]["attrib"] + folder_width = folder_attributes.get("resolutionWidth") + folder_height = folder_attributes.get("resolutionHeight") + review_instance_width = instance.data.get("review_width") + review_instance_height = instance.data.get("review_height") + + # Use resolution from instance if review width/height is set + # Otherwise use the resolution from preset if it has non-zero values + # Otherwise fall back to folder width x height + # Else define no width, then `capture.capture` will use render resolution + if review_instance_width and review_instance_height: + preset["width"] = review_instance_width + preset["height"] = review_instance_height + elif width_preset and height_preset: + preset["width"] = width_preset + preset["height"] = height_preset + elif folder_width and folder_height: + preset["width"] = folder_width + preset["height"] = folder_height + + # Isolate view is requested by having objects in the set besides a + # camera. If there is only 1 member it'll be the camera because we + # validate to have 1 camera only. 
+ if instance.data["isolate"] and len(instance.data["setMembers"]) > 1: + preset["isolate"] = instance.data["setMembers"] + + # Override camera options + # Enforce persisting camera depth of field + camera_options = preset.setdefault("camera_options", {}) + camera_options["depthOfField"] = cmds.getAttr( + "{0}.depthOfField".format(camera) + ) + + # Use Pan/Zoom from instance data instead of from preset + preset.pop("pan_zoom", None) + camera_options["panZoomEnabled"] = instance.data["panZoom"] + + # Override viewport options by instance data + viewport_options = preset.setdefault("viewport_options", {}) + viewport_options["displayLights"] = instance.data["displayLights"] + viewport_options["imagePlane"] = instance.data.get("imagePlane", True) + + # Override transparency if requested. + transparency = instance.data.get("transparency", 0) + if transparency != 0: + preset["viewport2_options"]["transparencyAlgorithm"] = transparency + + # Update preset with current panel setting + # if override_viewport_options is turned off + if not capture_preset["ViewportOptions"]["override_viewport_options"]: + panel_preset = capture.parse_view(preset["panel"]) + panel_preset.pop("camera") + preset.update(panel_preset) + + return preset + + +@contextlib.contextmanager +def viewport_default_options(panel, preset): + """Context manager used by `render_capture_preset`. + + We need to explicitly enable some viewport changes so the viewport is + refreshed ahead of playblasting. + + """ + # TODO: Clarify in the docstring WHY we need to set it ahead of + # playblasting. What issues does it solve? + viewport_defaults = {} + try: + keys = [ + "useDefaultMaterial", + "wireframeOnShaded", + "xray", + "jointXray", + "backfaceCulling", + "textures" + ] + for key in keys: + viewport_defaults[key] = cmds.modelEditor( + panel, query=True, **{key: True} + ) + if preset["viewport_options"].get(key): + cmds.modelEditor( + panel, edit=True, **{key: True} + ) + yield + finally: + # Restoring viewport options. + if viewport_defaults: + cmds.modelEditor( + panel, edit=True, **viewport_defaults + ) + + +@contextlib.contextmanager +def material_loading_mode(mode="immediate"): + """Set material loading mode during context""" + original = cmds.displayPref(query=True, materialLoadingMode=True) + cmds.displayPref(materialLoadingMode=mode) + try: + yield + finally: + cmds.displayPref(materialLoadingMode=original) + + +def get_namespace(node): + """Return namespace of given node""" + node_name = node.rsplit("|", 1)[-1] + if ":" in node_name: + return node_name.rsplit(":", 1)[0] + else: + return "" + + +def strip_namespace(node, namespace): + """Strip given namespace from node path. + + The namespace will only be stripped from names + if it starts with that namespace. If the namespace + occurs within another namespace it's not removed. + + Examples: + >>> strip_namespace("namespace:node", namespace="namespace:") + "node" + >>> strip_namespace("hello:world:node", namespace="hello:world") + "node" + >>> strip_namespace("hello:world:node", namespace="hello") + "world:node" + >>> strip_namespace("hello:world:node", namespace="world") + "hello:world:node" + >>> strip_namespace("ns:group|ns:node", namespace="ns") + "group|node" + + Returns: + str: Node name without given starting namespace. 
+ + """ + + # Ensure namespace ends with `:` + if not namespace.endswith(":"): + namespace = "{}:".format(namespace) + + # The long path for a node can also have the namespace + # in its parents so we need to remove it from each + return "|".join( + name[len(namespace):] if name.startswith(namespace) else name + for name in node.split("|") + ) + + +def get_custom_namespace(custom_namespace): + """Return unique namespace. + + The input namespace can contain a single group + of '#' number tokens to indicate where the namespace's + unique index should go. The amount of tokens defines + the zero padding of the number, e.g ### turns into 001. + + Warning: Note that a namespace will always be + prefixed with a _ if it starts with a digit + + Example: + >>> get_custom_namespace("myspace_##_") + # myspace_01_ + >>> get_custom_namespace("##_myspace") + # _01_myspace + >>> get_custom_namespace("myspace##") + # myspace01 + + """ + split = re.split("([#]+)", custom_namespace, 1) + + if len(split) == 3: + base, padding, suffix = split + padding = "%0{}d".format(len(padding)) + else: + base = split[0] + padding = "%02d" # default padding + suffix = "" + + return unique_namespace( + base, + format=padding, + prefix="_" if not base or base[0].isdigit() else "", + suffix=suffix + ) + + +def unique_namespace(namespace, format="%02d", prefix="", suffix=""): + """Return unique namespace + + Arguments: + namespace (str): Name of namespace to consider + format (str, optional): Formatting of the given iteration number + suffix (str, optional): Only consider namespaces with this suffix. + + >>> unique_namespace("bar") + # bar01 + >>> unique_namespace(":hello") + # :hello01 + >>> unique_namespace("bar:", suffix="_NS") + # bar01_NS: + + """ + + def current_namespace(): + current = cmds.namespaceInfo(currentNamespace=True, + absoluteName=True) + # When inside a namespace Maya adds no trailing : + if not current.endswith(":"): + current += ":" + return current + + # Always check against the absolute namespace root + # There's no clash with :x if we're defining namespace :a:x + ROOT = ":" if namespace.startswith(":") else current_namespace() + + # Strip trailing `:` tokens since we might want to add a suffix + start = ":" if namespace.startswith(":") else "" + end = ":" if namespace.endswith(":") else "" + namespace = namespace.strip(":") + if ":" in namespace: + # Split off any nesting that we don't uniqify anyway. + parents, namespace = namespace.rsplit(":", 1) + start += parents + ":" + ROOT += start + + def exists(n): + # Check for clash with nodes and namespaces + fullpath = ROOT + n + return cmds.objExists(fullpath) or cmds.namespace(exists=fullpath) + + iteration = 1 + while True: + nr_namespace = namespace + format % iteration + unique = prefix + nr_namespace + suffix + + if not exists(unique): + return start + unique + end + + iteration += 1 + + +def read(node): + """Return user-defined attributes from `node`""" + + data = dict() + + for attr in cmds.listAttr(node, userDefined=True) or list(): + try: + value = cmds.getAttr(node + "." + attr, asString=True) + + except RuntimeError: + # For Message type attribute or others that have connections, + # take source node name as value. + source = cmds.listConnections(node + "." + attr, + source=True, + destination=False) + source = cmds.ls(source, long=True) or [None] + value = source[0] + + except ValueError: + # Some attributes cannot be read directly, + # such as mesh and color attributes. 
These + # are considered non-essential to this + # particular publishing pipeline. + value = None + + data[attr] = value + + return data + + +def matrix_equals(a, b, tolerance=1e-10): + """ + Compares two matrices with an imperfection tolerance + + Args: + a (list, tuple): the matrix to check + b (list, tuple): the matrix to check against + tolerance (float): the precision of the differences + + Returns: + bool : True or False + + """ + if not all(abs(x - y) < tolerance for x, y in zip(a, b)): + return False + return True + + +def float_round(num, places=0, direction=ceil): + return direction(num * (10**places)) / float(10**places) + + +def pairwise(iterable): + """s -> (s0,s1), (s2,s3), (s4, s5), ...""" + from six.moves import zip + + a = iter(iterable) + return zip(a, a) + + +def collect_animation_defs(fps=False): + """Get the basic animation attribute definitions for the publisher. + + Returns: + OrderedDict + + """ + + # get scene values as defaults + frame_start = cmds.playbackOptions(query=True, minTime=True) + frame_end = cmds.playbackOptions(query=True, maxTime=True) + frame_start_handle = cmds.playbackOptions( + query=True, animationStartTime=True + ) + frame_end_handle = cmds.playbackOptions(query=True, animationEndTime=True) + + handle_start = frame_start - frame_start_handle + handle_end = frame_end_handle - frame_end + + # build attributes + defs = [ + NumberDef("frameStart", + label="Frame Start", + default=frame_start, + decimals=0), + NumberDef("frameEnd", + label="Frame End", + default=frame_end, + decimals=0), + NumberDef("handleStart", + label="Handle Start", + default=handle_start, + decimals=0), + NumberDef("handleEnd", + label="Handle End", + default=handle_end, + decimals=0), + NumberDef("step", + label="Step size", + tooltip="A smaller step size means more samples and larger " + "output files.\n" + "A 1.0 step size is a single sample every frame.\n" + "A 0.5 step size is two samples per frame.\n" + "A 0.2 step size is five samples per frame.", + default=1.0, + decimals=3), + ] + + if fps: + current_fps = mel.eval('currentTimeUnitToFPS()') + fps_def = NumberDef( + "fps", label="FPS", default=current_fps, decimals=5 + ) + defs.append(fps_def) + + return defs + + +def imprint(node, data): + """Write `data` to `node` as userDefined attributes + + Arguments: + node (str): Long name of node + data (dict): Dictionary of key/value pairs + + Example: + >>> from maya import cmds + >>> def compute(): + ... return 6 + ... + >>> cube, generator = cmds.polyCube() + >>> imprint(cube, { + ... "regularString": "myFamily", + ... "computedValue": lambda: compute() + ... }) + ... 
+ >>> cmds.getAttr(cube + ".computedValue") + 6 + + """ + + for key, value in data.items(): + + if callable(value): + # Support values evaluated at imprint + value = value() + + if isinstance(value, bool): + add_type = {"attributeType": "bool"} + set_type = {"keyable": False, "channelBox": True} + elif isinstance(value, string_types): + add_type = {"dataType": "string"} + set_type = {"type": "string"} + elif isinstance(value, int): + add_type = {"attributeType": "long"} + set_type = {"keyable": False, "channelBox": True} + elif isinstance(value, float): + add_type = {"attributeType": "double"} + set_type = {"keyable": False, "channelBox": True} + elif isinstance(value, (list, tuple)): + add_type = {"attributeType": "enum", "enumName": ":".join(value)} + set_type = {"keyable": False, "channelBox": True} + value = 0 # enum default + else: + raise TypeError("Unsupported type: %r" % type(value)) + + cmds.addAttr(node, longName=key, **add_type) + cmds.setAttr(node + "." + key, value, **set_type) + + +def lsattr(attr, value=None): + """Return nodes matching `key` and `value` + + Arguments: + attr (str): Name of Maya attribute + value (object, optional): Value of attribute. If none + is provided, return all nodes with this attribute. + + Example: + >> lsattr("id", "myId") + ["myNode"] + >> lsattr("id") + ["myNode", "myOtherNode"] + + """ + + if value is None: + return cmds.ls("*.%s" % attr, + recursive=True, + objectsOnly=True, + long=True) + return lsattrs({attr: value}) + + +def lsattrs(attrs): + """Return nodes with the given attribute(s). + + Arguments: + attrs (dict): Name and value pairs of expected matches + + Example: + >>> # Return nodes with an `age` of five. + >>> lsattrs({"age": "five"}) + >>> # Return nodes with both `age` and `color` of five and blue. + >>> lsattrs({"age": "five", "color": "blue"}) + + Return: + list: matching nodes. + + """ + + dep_fn = OpenMaya.MFnDependencyNode() + dag_fn = OpenMaya.MFnDagNode() + selection_list = OpenMaya.MSelectionList() + + first_attr = next(iter(attrs)) + + try: + selection_list.add("*.{0}".format(first_attr), + searchChildNamespaces=True) + except RuntimeError as exc: + if str(exc).endswith("Object does not exist"): + return [] + + matches = set() + for i in range(selection_list.length()): + node = selection_list.getDependNode(i) + if node.hasFn(OpenMaya.MFn.kDagNode): + fn_node = dag_fn.setObject(node) + full_path_names = [path.fullPathName() + for path in fn_node.getAllPaths()] + else: + fn_node = dep_fn.setObject(node) + full_path_names = [fn_node.name()] + + for attr in attrs: + try: + plug = fn_node.findPlug(attr, True) + if plug.asString() != attrs[attr]: + break + except RuntimeError: + break + else: + matches.update(full_path_names) + + return list(matches) + + +@contextlib.contextmanager +def attribute_values(attr_values): + """Remaps node attributes to values during context. + + Arguments: + attr_values (dict): Dictionary with (attr, value) + + """ + + original = [(attr, cmds.getAttr(attr)) for attr in attr_values] + try: + for attr, value in attr_values.items(): + if isinstance(value, string_types): + cmds.setAttr(attr, value, type="string") + else: + cmds.setAttr(attr, value) + yield + finally: + for attr, value in original: + if isinstance(value, string_types): + cmds.setAttr(attr, value, type="string") + elif value is None and cmds.getAttr(attr, type=True) == "string": + # In some cases the maya.cmds.getAttr command returns None + # for string attributes but this value cannot assigned. 
+ # Note: After setting it once to "" it will then return "" + # instead of None. So this would only happen once. + cmds.setAttr(attr, "", type="string") + else: + cmds.setAttr(attr, value) + + +@contextlib.contextmanager +def keytangent_default(in_tangent_type='auto', + out_tangent_type='auto'): + """Set the default keyTangent for new keys during this context""" + + original_itt = cmds.keyTangent(query=True, g=True, itt=True)[0] + original_ott = cmds.keyTangent(query=True, g=True, ott=True)[0] + cmds.keyTangent(g=True, itt=in_tangent_type) + cmds.keyTangent(g=True, ott=out_tangent_type) + try: + yield + finally: + cmds.keyTangent(g=True, itt=original_itt) + cmds.keyTangent(g=True, ott=original_ott) + + +@contextlib.contextmanager +def undo_chunk(): + """Open a undo chunk during context.""" + + try: + cmds.undoInfo(openChunk=True) + yield + finally: + cmds.undoInfo(closeChunk=True) + + +@contextlib.contextmanager +def evaluation(mode="off"): + """Set the evaluation manager during context. + + Arguments: + mode (str): The mode to apply during context. + "off": The standard DG evaluation (stable) + "serial": A serial DG evaluation + "parallel": The Maya 2016+ parallel evaluation + + """ + + original = cmds.evaluationManager(query=True, mode=1)[0] + try: + cmds.evaluationManager(mode=mode) + yield + finally: + cmds.evaluationManager(mode=original) + + +@contextlib.contextmanager +def empty_sets(sets, force=False): + """Remove all members of the sets during the context""" + + assert isinstance(sets, (list, tuple)) + + original = dict() + original_connections = [] + + # Store original state + for obj_set in sets: + members = cmds.sets(obj_set, query=True) + original[obj_set] = members + + try: + for obj_set in sets: + cmds.sets(clear=obj_set) + if force: + # Break all connections if force is enabled, this way we + # prevent Maya from exporting any reference nodes which are + # connected with placeHolder[x] attributes + plug = "%s.dagSetMembers" % obj_set + connections = cmds.listConnections(plug, + source=True, + destination=False, + plugs=True, + connections=True) or [] + original_connections.extend(connections) + for dest, src in pairwise(connections): + cmds.disconnectAttr(src, dest) + yield + finally: + + for dest, src in pairwise(original_connections): + cmds.connectAttr(src, dest) + + # Restore original members + _iteritems = getattr(original, "iteritems", original.items) + for origin_set, members in _iteritems(): + cmds.sets(members, forceElement=origin_set) + + +@contextlib.contextmanager +def renderlayer(layer): + """Set the renderlayer during the context + + Arguments: + layer (str): Name of layer to switch to. + + """ + + original = cmds.editRenderLayerGlobals(query=True, + currentRenderLayer=True) + + try: + cmds.editRenderLayerGlobals(currentRenderLayer=layer) + yield + finally: + cmds.editRenderLayerGlobals(currentRenderLayer=original) + + +class delete_after(object): + """Context Manager that will delete collected nodes after exit. + + This allows to ensure the nodes added to the context are deleted + afterwards. This is useful if you want to ensure nodes are deleted + even if an error is raised. 
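Because each of these managers restores state in a `finally` clause, they nest safely. A minimal sketch of combining them around an export or render step, assuming the helpers above are importable from this module; the render layer name is hypothetical:

    from maya import cmds

    # Hypothetical render layer name; any existing layer works here.
    with renderlayer("rs_beauty"):
        with evaluation("off"):
            with attribute_values({"defaultRenderGlobals.startFrame": 1001.0}):
                # export or render here; all settings revert on exit
                print(cmds.getAttr("defaultRenderGlobals.startFrame"))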
+ + Examples: + with delete_after() as delete_bin: + cube = maya.cmds.polyCube() + delete_bin.extend(cube) + # cube exists + # cube deleted + + """ + + def __init__(self, nodes=None): + + self._nodes = list() + + if nodes: + self.extend(nodes) + + def append(self, node): + self._nodes.append(node) + + def extend(self, nodes): + self._nodes.extend(nodes) + + def __iter__(self): + return iter(self._nodes) + + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + if self._nodes: + cmds.delete(self._nodes) + + +def get_current_renderlayer(): + return cmds.editRenderLayerGlobals(query=True, currentRenderLayer=True) + + +def get_renderer(layer): + with renderlayer(layer): + return cmds.getAttr("defaultRenderGlobals.currentRenderer") + + +@contextlib.contextmanager +def no_undo(flush=False): + """Disable the undo queue during the context + + Arguments: + flush (bool): When True the undo queue will be emptied when returning + from the context losing all undo history. Defaults to False. + + """ + original = cmds.undoInfo(query=True, state=True) + keyword = 'state' if flush else 'stateWithoutFlush' + + try: + cmds.undoInfo(**{keyword: False}) + yield + finally: + cmds.undoInfo(**{keyword: original}) + + +def get_shader_assignments_from_shapes(shapes, components=True): + """Return the shape assignment per related shading engines. + + Returns a dictionary where the keys are shadingGroups and the values are + lists of assigned shapes or shape-components. + + Since `maya.cmds.sets` returns shader members on the shapes as components + on the transform we correct that in this method too. + + For the 'shapes' this will return a dictionary like: + { + "shadingEngineX": ["nodeX", "nodeY"], + "shadingEngineY": ["nodeA", "nodeB"] + } + + Args: + shapes (list): The shapes to collect the assignments for. + components (bool): Whether to include the component assignments. + + Returns: + dict: The {shadingEngine: shapes} relationships + + """ + + shapes = cmds.ls(shapes, + long=True, + shapes=True, + objectsOnly=True) + if not shapes: + return {} + + # Collect shading engines and their shapes + assignments = defaultdict(list) + for shape in shapes: + + # Get unique shading groups for the shape + shading_groups = cmds.listConnections(shape, + source=False, + destination=True, + plugs=False, + connections=False, + type="shadingEngine") or [] + shading_groups = list(set(shading_groups)) + for shading_group in shading_groups: + assignments[shading_group].append(shape) + + if components: + # Note: Components returned from maya.cmds.sets are "listed" as if + # being assigned to the transform like: pCube1.f[0] as opposed + # to pCubeShape1.f[0] so we correct that here too. + + # Build a mapping from parent to shapes to include in lookup. + transforms = {shape.rsplit("|", 1)[0]: shape for shape in shapes} + lookup = set(shapes) | set(transforms.keys()) + + component_assignments = defaultdict(list) + for shading_group in assignments.keys(): + members = cmds.ls(cmds.sets(shading_group, query=True), long=True) + for member in members: + + node = member.split(".", 1)[0] + if node not in lookup: + continue + + # Component + if "." 
in member: + + # Fix transform to shape as shaders are assigned to shapes + if node in transforms: + shape = transforms[node] + component = member.split(".", 1)[1] + member = "{0}.{1}".format(shape, component) + + component_assignments[shading_group].append(member) + assignments = component_assignments + + return dict(assignments) + + +@contextlib.contextmanager +def shader(nodes, shadingEngine="initialShadingGroup"): + """Assign a shader to nodes during the context""" + + shapes = cmds.ls(nodes, dag=1, objectsOnly=1, shapes=1, long=1) + original = get_shader_assignments_from_shapes(shapes) + + try: + # Assign override shader + if shapes: + cmds.sets(shapes, edit=True, forceElement=shadingEngine) + yield + finally: + + # Assign original shaders + for sg, members in original.items(): + if members: + cmds.sets(members, edit=True, forceElement=sg) + + +@contextlib.contextmanager +def displaySmoothness(nodes, + divisionsU=0, + divisionsV=0, + pointsWire=4, + pointsShaded=1, + polygonObject=1): + """Set the displaySmoothness during the context""" + + # Ensure only non-intermediate shapes + nodes = cmds.ls(nodes, + dag=1, + shapes=1, + long=1, + noIntermediate=True) + + def parse(node): + """Parse the current state of a node""" + state = {} + for key in ["divisionsU", + "divisionsV", + "pointsWire", + "pointsShaded", + "polygonObject"]: + value = cmds.displaySmoothness(node, query=1, **{key: True}) + if value is not None: + state[key] = value[0] + return state + + originals = dict((node, parse(node)) for node in nodes) + + try: + # Apply current state + cmds.displaySmoothness(nodes, + divisionsU=divisionsU, + divisionsV=divisionsV, + pointsWire=pointsWire, + pointsShaded=pointsShaded, + polygonObject=polygonObject) + yield + finally: + # Revert state + _iteritems = getattr(originals, "iteritems", originals.items) + for node, state in _iteritems(): + if state: + cmds.displaySmoothness(node, **state) + + +@contextlib.contextmanager +def no_display_layers(nodes): + """Ensure nodes are not in a displayLayer during context. + + Arguments: + nodes (list): The nodes to remove from any display layer. + + """ + + # Ensure long names + nodes = cmds.ls(nodes, long=True) + + # Get the original state + lookup = set(nodes) + original = {} + for layer in cmds.ls(type='displayLayer'): + + # Skip default layer + if layer == "defaultLayer": + continue + + members = cmds.editDisplayLayerMembers(layer, + query=True, + fullNames=True) + if not members: + continue + members = set(members) + + included = lookup.intersection(members) + if included: + original[layer] = list(included) + + try: + # Add all nodes to default layer + cmds.editDisplayLayerMembers("defaultLayer", nodes, noRecurse=True) + yield + finally: + # Restore original members + _iteritems = getattr(original, "iteritems", original.items) + for layer, members in _iteritems(): + cmds.editDisplayLayerMembers(layer, members, noRecurse=True) + + +@contextlib.contextmanager +def namespaced(namespace, new=True, relative_names=None): + """Work inside namespace during context + + Args: + new (bool): When enabled this will rename the namespace to a unique + namespace if the input namespace already exists. 
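A usage sketch for this context manager; the namespace name is made up:

    from maya import cmds

    with namespaced("assetA", new=True) as namespace:
        # Created nodes end up in the new namespace, e.g. "assetA01:geo"
        # when a namespace called "assetA" already existed.
        cube = cmds.polyCube(name="geo")[0]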
+ + Yields: + str: The namespace that is used during the context + + """ + original = cmds.namespaceInfo(cur=True, absoluteName=True) + original_relative_names = cmds.namespace(query=True, relativeNames=True) + if new: + namespace = unique_namespace(namespace) + cmds.namespace(add=namespace) + if relative_names is not None: + cmds.namespace(relativeNames=relative_names) + try: + cmds.namespace(set=namespace) + yield namespace + finally: + cmds.namespace(set=original) + if relative_names is not None: + cmds.namespace(relativeNames=original_relative_names) + + +@contextlib.contextmanager +def maintained_selection_api(): + """Maintain selection using the Maya Python API. + + Warning: This is *not* added to the undo stack. + + """ + original = OpenMaya.MGlobal.getActiveSelectionList() + try: + yield + finally: + OpenMaya.MGlobal.setActiveSelectionList(original) + + +@contextlib.contextmanager +def tool(context): + """Set a tool context during the context manager. + + """ + original = cmds.currentCtx() + try: + cmds.setToolTo(context) + yield + finally: + cmds.setToolTo(original) + + +def polyConstraint(components, *args, **kwargs): + """Return the list of *components* with the constraints applied. + + A wrapper around Maya's `polySelectConstraint` to retrieve its results as + a list without altering selections. For a list of possible constraints + see `maya.cmds.polySelectConstraint` documentation. + + Arguments: + components (list): List of components of polygon meshes + + Returns: + list: The list of components filtered by the given constraints. + + """ + + kwargs.pop('mode', None) + + with no_undo(flush=False): + # Reverting selection to the original selection using + # `maya.cmds.select` can be slow in rare cases where previously + # `maya.cmds.polySelectConstraint` had set constrain to "All and Next" + # and the "Random" setting was activated. To work around this we + # revert to the original selection using the Maya API. This is safe + # since we're not generating any undo change anyway. + with tool("selectSuperContext"): + # Selection can be very slow when in a manipulator mode. + # So we force the selection context which is fast. + with maintained_selection_api(): + # Apply constraint using mode=2 (current and next) so + # it applies to the selection made before it; because just + # a `maya.cmds.select()` call will not trigger the constraint. + with reset_polySelectConstraint(): + cmds.select(components, r=1, noExpand=True) + cmds.polySelectConstraint(*args, mode=2, **kwargs) + result = cmds.ls(selection=True) + cmds.select(clear=True) + return result + + +@contextlib.contextmanager +def reset_polySelectConstraint(reset=True): + """Context during which the given polyConstraint settings are disabled. + + The original settings are restored after the context. + + """ + + original = cmds.polySelectConstraint(query=True, stateString=True) + + try: + if reset: + # Ensure command is available in mel + # This can happen when running standalone + if not mel.eval("exists resetPolySelectConstraint"): + mel.eval("source polygonConstraint") + + # Reset all parameters + mel.eval("resetPolySelectConstraint;") + cmds.polySelectConstraint(disable=True) + yield + finally: + mel.eval(original) + + +def is_visible(node, + displayLayer=True, + intermediateObject=True, + parentHidden=True, + visibility=True): + """Is `node` visible? 
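A usage sketch for `polyConstraint`, filtering a mesh's faces down to triangles; the flag values follow the `maya.cmds.polySelectConstraint` documentation:

    from maya import cmds

    sphere = cmds.polySphere(name="demoSphere")[0]
    faces = cmds.ls(sphere + ".f[*]", flatten=True)

    # type=0x0008 constrains to faces, size=1 to triangles; see the
    # maya.cmds.polySelectConstraint documentation for the flag values.
    triangles = polyConstraint(faces, type=0x0008, size=1)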
+ + Returns whether a node is hidden by one of the following methods: + - The node exists (always checked) + - The node must be a dagNode (always checked) + - The node's visibility is off. + - The node is set as intermediate Object. + - The node is in a disabled displayLayer. + - Whether any of its parent nodes is hidden. + + Roughly based on: http://ewertb.soundlinker.com/mel/mel.098.php + + Returns: + bool: Whether the node is visible in the scene + + """ + + # Only existing objects can be visible + if not cmds.objExists(node): + return False + + # Only dagNodes can be visible + if not cmds.objectType(node, isAType='dagNode'): + return False + + if visibility: + if not cmds.getAttr('{0}.visibility'.format(node)): + return False + + if intermediateObject and cmds.objectType(node, isAType='shape'): + if cmds.getAttr('{0}.intermediateObject'.format(node)): + return False + + if displayLayer: + # Display layers set overrideEnabled and overrideVisibility on members + if cmds.attributeQuery('overrideEnabled', node=node, exists=True): + override_enabled = cmds.getAttr('{}.overrideEnabled'.format(node)) + override_visibility = cmds.getAttr('{}.overrideVisibility'.format( + node)) + if override_enabled and not override_visibility: + return False + + if parentHidden: + parents = cmds.listRelatives(node, parent=True, fullPath=True) + if parents: + parent = parents[0] + if not is_visible(parent, + displayLayer=displayLayer, + intermediateObject=False, + parentHidden=parentHidden, + visibility=visibility): + return False + + return True + +# region ID +def get_id_required_nodes(referenced_nodes=False, + nodes=None, + existing_ids=True): + """Return nodes that should receive a `cbId` attribute. + + This includes only mesh and curve nodes, parent transforms of the shape + nodes, file texture nodes and object sets (including shading engines). + + This filters out any node which is locked, referenced, read-only, + intermediate object. 
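A small sketch showing how the `parentHidden` recursion affects the result, assuming `is_visible` is imported from this module:

    from maya import cmds

    group = cmds.group(empty=True, name="rig_GRP")
    cube = cmds.polyCube(name="body")[0]
    cube = cmds.parent(cube, group)[0]
    cmds.setAttr(group + ".visibility", False)

    is_visible(cube)                      # False: a parent is hidden
    is_visible(cube, parentHidden=False)  # True: only local checks apply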
+ + Args: + referenced_nodes (bool): set True to include referenced nodes + nodes (list, Optional): nodes to consider + existing_ids (bool): set True to include nodes with `cbId` attribute + + Returns: + nodes (set): list of filtered nodes + """ + + if nodes is not None and not nodes: + # User supplied an empty `nodes` list to check so all we can + # do is return the empty result + return set() + + def _node_type_exists(node_type): + try: + cmds.nodeType(node_type, isTypeName=True) + return True + except RuntimeError: + return False + + def iterate(maya_iterator): + while not maya_iterator.isDone(): + yield maya_iterator.thisNode() + maya_iterator.next() + + # `readOnly` flag is obsolete as of Maya 2016 therefore we explicitly + # remove default nodes and reference nodes + default_camera_shapes = { + "frontShape", "sideShape", "topShape", "perspShape" + } + + # The filtered types do not include transforms because we only want the + # parent transforms that have a child shape that we filtered to, so we + # include the parents here + types = ["mesh", "nurbsCurve", "nurbsSurface", "file", "objectSet"] + + # Check if plugin nodes are available for Maya by checking if the plugin + # is loaded + if cmds.pluginInfo("pgYetiMaya", query=True, loaded=True): + types.append("pgYetiMaya") + + iterator_type = OpenMaya.MIteratorType() + # This tries to be closest matching API equivalents of `types` variable + iterator_type.filterList = [ + OpenMaya.MFn.kMesh, # mesh + OpenMaya.MFn.kNurbsSurface, # nurbsSurface + OpenMaya.MFn.kNurbsCurve, # nurbsCurve + OpenMaya.MFn.kFileTexture, # file + OpenMaya.MFn.kSet, # objectSet + OpenMaya.MFn.kPluginShape # pgYetiMaya + ] + it = OpenMaya.MItDependencyNodes(iterator_type) + + fn_dep = OpenMaya.MFnDependencyNode() + fn_dag = OpenMaya.MFnDagNode() + result = set() + + def _should_include_parents(obj): + """Whether to include parents of obj in output""" + if not obj.hasFn(OpenMaya.MFn.kShape): + return False + + fn_dag.setObject(obj) + if fn_dag.isIntermediateObject: + return False + + # Skip default cameras + if ( + obj.hasFn(OpenMaya.MFn.kCamera) and + fn_dag.name() in default_camera_shapes + ): + return False + + return True + + def _add_to_result_if_valid(obj): + """Add to `result` if the object should be included""" + fn_dep.setObject(obj) + if not existing_ids and fn_dep.hasAttribute("cbId"): + return + + if not referenced_nodes and fn_dep.isFromReferencedFile: + return + + if fn_dep.isDefaultNode: + return + + if fn_dep.isLocked: + return + + # Skip default cameras + if ( + obj.hasFn(OpenMaya.MFn.kCamera) and + fn_dep.name() in default_camera_shapes + ): + return + + if obj.hasFn(OpenMaya.MFn.kDagNode): + # DAG nodes + fn_dag.setObject(obj) + + # Skip intermediate objects + if fn_dag.isIntermediateObject: + return + + # DAG nodes can be instanced and thus may have multiple paths. + # We need to identify each path + paths = OpenMaya.MDagPath.getAllPathsTo(obj) + for dag in paths: + path = dag.fullPathName() + result.add(path) + else: + # Dependency node + path = fn_dep.name() + result.add(path) + + for obj in iterate(it): + # For any non-intermediate shape node always include the parent + # even if we exclude the shape itself (e.g. 
when locked, default) + if _should_include_parents(obj): + fn_dag.setObject(obj) + parents = [ + fn_dag.parent(index) for index in range(fn_dag.parentCount()) + ] + for parent_obj in parents: + _add_to_result_if_valid(parent_obj) + + _add_to_result_if_valid(obj) + + if not result: + return result + + # Exclude some additional types + exclude_types = [] + if _node_type_exists("ilrBakeLayer"): + # Remove Turtle from the result if Turtle is loaded + exclude_types.append("ilrBakeLayer") + + if exclude_types: + exclude_nodes = set(cmds.ls(nodes, long=True, type=exclude_types)) + if exclude_nodes: + result -= exclude_nodes + + # Filter to explicit input nodes if provided + if nodes is not None: + # The amount of input nodes to filter to can be large and querying + # many nodes can be slow in Maya. As such we want to try and reduce + # it as much as possible, so we include the type filter to try and + # reduce the result of `maya.cmds.ls` here. + nodes = set(cmds.ls(nodes, long=True, type=types + ["dagNode"])) + if nodes: + result &= nodes + else: + return set() + + return result + + +def get_id(node): + """Get the `cbId` attribute of the given node. + + Args: + node (str): the name of the node to retrieve the attribute from + Returns: + str + + """ + if node is None: + return + + sel = OpenMaya.MSelectionList() + sel.add(node) + + api_node = sel.getDependNode(0) + fn = OpenMaya.MFnDependencyNode(api_node) + + if not fn.hasAttribute("cbId"): + return + + try: + return fn.findPlug("cbId", False).asString() + except RuntimeError: + log.warning("Failed to retrieve cbId on %s", node) + return + + +def generate_ids(nodes, folder_id=None): + """Returns new unique ids for the given nodes. + + Note: This does not assign the new ids, it only generates the values. + + To assign new ids using this method: + >>> nodes = ["a", "b", "c"] + >>> for node, id in generate_ids(nodes): + >>> set_id(node, id) + + To also override any existing values (and assign regenerated ids): + >>> nodes = ["a", "b", "c"] + >>> for node, id in generate_ids(nodes): + >>> set_id(node, id, overwrite=True) + + Args: + nodes (list): List of nodes. + folder_id (Optional[str]): Folder id to generate id for. When None + provided current folder is used. + + Returns: + list: A list of (node, id) tuples. + + """ + + if folder_id is None: + # Get the folder id based on current context folder + project_name = get_current_project_name() + folder_path = get_current_folder_path() + if not folder_path: + raise ValueError("Current folder path is not set") + folder_entity = ayon_api.get_folder_by_path( + project_name, folder_path, fields=["id"] + ) + if not folder_entity: + raise ValueError(( + "Current folder '{}' was not found on the server" + ).format(folder_path)) + folder_id = folder_entity["id"] + + node_ids = [] + for node in nodes: + _, uid = str(uuid.uuid4()).rsplit("-", 1) + unique_id = "{}:{}".format(folder_id, uid) + node_ids.append((node, unique_id)) + + return node_ids + + +def set_id(node, unique_id, overwrite=False): + """Add cbId to `node` unless one already exists. + + Args: + node (str): the node to add the "cbId" on + unique_id (str): The unique node id to assign. + This should be generated by `generate_ids`. + overwrite (bool, optional): When True overrides the current value even + if `node` already has an id. Defaults to False. 
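Taken together, these helpers form the typical id pipeline. A minimal sketch, assuming an active AYON session so the current folder can be resolved:

    # Assign ids to every node that still lacks one.
    nodes = get_id_required_nodes(referenced_nodes=False, existing_ids=False)
    for node, new_id in generate_ids(nodes):
        set_id(node, new_id)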
+ + Returns: + None + + """ + + exists = cmds.attributeQuery("cbId", node=node, exists=True) + + # Add the attribute if it does not exist yet + if not exists: + cmds.addAttr(node, longName="cbId", dataType="string") + + # Set the value + if not exists or overwrite: + attr = "{0}.cbId".format(node) + cmds.setAttr(attr, unique_id, type="string") + + +def get_attribute(plug, + asString=False, + expandEnvironmentVariables=False, + **kwargs): + """Maya getAttr with some fixes based on `pymel.core.general.getAttr()`. + + Like Pymel getAttr this applies some changes to `maya.cmds.getAttr` + - maya pointlessly returned vector results as a tuple wrapped in a list + (ex. '[(1,2,3)]'). This command unpacks the vector for you. + - when getting a multi-attr, maya would raise an error, but this will + return a list of values for the multi-attr + - added support for getting message attributes by returning the + connections instead + + Note that the asString + expandEnvironmentVariables argument naming + convention matches the `maya.cmds.getAttr` arguments so that it can + act as a direct replacement for it. + + Args: + plug (str): Node's attribute plug as `node.attribute` + asString (bool): Return string value for enum attributes instead + of the index. Note that the return value can be dependent on the + UI language Maya is running in. + expandEnvironmentVariables (bool): Expand any environment variable and + (tilde characters on UNIX) found in string attributes which are + returned. + + Kwargs: + Supports the keyword arguments of `maya.cmds.getAttr` + + Returns: + object: The value of the maya attribute. + + """ + attr_type = cmds.getAttr(plug, type=True) + if asString: + kwargs["asString"] = True + if expandEnvironmentVariables: + kwargs["expandEnvironmentVariables"] = True + try: + res = cmds.getAttr(plug, **kwargs) + except RuntimeError: + if attr_type == "message": + return cmds.listConnections(plug) + + node, attr = plug.split(".", 1) + children = cmds.attributeQuery(attr, node=node, listChildren=True) + if children: + return [ + get_attribute("{}.{}".format(node, child)) + for child in children + ] + + raise + + # Convert vector result wrapped in tuple + if isinstance(res, list) and len(res): + if isinstance(res[0], tuple) and len(res): + if attr_type in {'pointArray', 'vectorArray'}: + return res + return res[0] + + return res + + +def set_attribute(attribute, value, node): + """Adjust attributes based on the value from the attribute data + + If an attribute does not exists on the target it will be added with + the dataType being controlled by the value type. 
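A short sketch of the behaviors described above, assuming `get_attribute` is imported from this module:

    from maya import cmds

    cube = cmds.polyCube()[0]

    # Vector results come back unpacked, unlike maya.cmds.getAttr which
    # returns them as [(x, y, z)].
    get_attribute(cube + ".translate")       # -> (0.0, 0.0, 0.0)

    # Message plugs return their connections (or None) instead of raising.
    get_attribute("initialShadingGroup.message")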
+ + Args: + attribute (str): name of the attribute to change + value: the value to change to attribute to + node (str): name of the node + + Returns: + None + """ + + value_type = type(value).__name__ + kwargs = ATTRIBUTE_DICT[value_type] + if not cmds.attributeQuery(attribute, node=node, exists=True): + log.debug("Creating attribute '{}' on " + "'{}'".format(attribute, node)) + cmds.addAttr(node, longName=attribute, **kwargs) + + node_attr = "{}.{}".format(node, attribute) + enum_type = cmds.attributeQuery(attribute, node=node, enum=True) + if enum_type and value_type == "str": + enum_string_values = cmds.attributeQuery( + attribute, node=node, listEnum=True + )[0].split(":") + cmds.setAttr( + "{}.{}".format(node, attribute), enum_string_values.index(value) + ) + elif "dataType" in kwargs: + attr_type = kwargs["dataType"] + cmds.setAttr(node_attr, value, type=attr_type) + else: + cmds.setAttr(node_attr, value) + + +def apply_attributes(attributes, nodes_by_id): + """Alter the attributes to match the state when publishing + + Apply attribute settings from the publish to the node in the scene based + on the UUID which is stored in the cbId attribute. + + Args: + attributes (list): list of dictionaries + nodes_by_id (dict): collection of nodes based on UUID + {uuid: [node, node]} + + """ + + for attr_data in attributes: + nodes = nodes_by_id[attr_data["uuid"]] + attr_value = attr_data["attributes"] + for node in nodes: + for attr, value in attr_value.items(): + set_attribute(attr, value, node) + + +def is_valid_reference_node(reference_node): + """Return whether Maya considers the reference node a valid reference. + + Maya might report an error when using `maya.cmds.referenceQuery`: + Reference node 'reference_node' is not associated with a reference file. + + Note that this does *not* check whether the reference node points to an + existing file. Instead it only returns whether maya considers it valid + and thus is not an unassociated reference node + + Arguments: + reference_node (str): Reference node name + + Returns: + bool: Whether reference node is a valid reference + + """ + sel = OpenMaya.MSelectionList() + sel.add(reference_node) + depend_node = sel.getDependNode(0) + return OpenMaya.MFnReference(depend_node).isValidReference() + + +def get_container_members(container): + """Returns the members of a container. + This includes the nodes from any loaded references in the container. + """ + if isinstance(container, dict): + # Assume it's a container dictionary + container = container["objectName"] + + members = cmds.sets(container, query=True) or [] + members = cmds.ls(members, long=True, objectsOnly=True) or [] + all_members = set(members) + + # Include any referenced nodes from any reference in the container + # This is required since we've removed adding ALL nodes of a reference + # into the container set and only add the reference node now. 
+ for ref in cmds.ls(members, exactType="reference", objectsOnly=True): + + # Ignore any `:sharedReferenceNode` + if ref.rsplit(":", 1)[-1].startswith("sharedReferenceNode"): + continue + + # Ignore _UNKNOWN_REF_NODE_ (PLN-160) + if ref.rsplit(":", 1)[-1].startswith("_UNKNOWN_REF_NODE_"): + continue + + try: + reference_members = cmds.referenceQuery(ref, + nodes=True, + dagPath=True) + except RuntimeError: + # Ignore reference nodes that are not associated with a + # referenced file on which `referenceQuery` command fails + if not is_valid_reference_node(ref): + continue + raise + reference_members = cmds.ls(reference_members, + long=True, + objectsOnly=True) + all_members.update(reference_members) + + return list(all_members) + + +# region LOOKDEV +def list_looks(project_name, folder_id): + """Return all look products for the given folder. + + This assumes all look products start with "look*" in their names. + + Returns: + list[dict[str, Any]]: List of look products. + + """ + return list(ayon_api.get_products( + project_name, folder_ids=[folder_id], product_types={"look"} + )) + + +def assign_look_by_version(nodes, version_id): + """Assign nodes a specific published look version by id. + + This assumes the nodes correspond with the asset. + + Args: + nodes(list): nodes to assign look to + version_id (bson.ObjectId): database id of the version + + Returns: + None + """ + + project_name = get_current_project_name() + + # Get representations of shader file and relationships + representations = list(ayon_api.get_representations( + project_name=project_name, + representation_names={"ma", "json"}, + version_ids=[version_id] + )) + look_representation = next( + repre for repre in representations if repre["name"] == "ma") + json_representation = next( + repre for repre in representations if repre["name"] == "json") + + # See if representation is already loaded, if so reuse it. + host = registered_host() + representation_id = look_representation["id"] + for container in host.ls(): + if (container['loader'] == "LookLoader" and + container['representation'] == representation_id): + log.info("Reusing loaded look ..") + container_node = container['objectName'] + break + else: + log.info("Using look for the first time ..") + + # Load file + _loaders = discover_loader_plugins() + loaders = loaders_from_representation(_loaders, representation_id) + Loader = next((i for i in loaders if i.__name__ == "LookLoader"), None) + if Loader is None: + raise RuntimeError("Could not find LookLoader, this is a bug") + + # Reference the look file + with maintained_selection(): + container_node = load_container(Loader, look_representation) + + # Get container members + shader_nodes = get_container_members(container_node) + + # Load relationships + shader_relation = get_representation_path(json_representation) + with open(shader_relation, "r") as f: + relationships = json.load(f) + + # Assign relationships + apply_shaders(relationships, shader_nodes, nodes) + + +def assign_look(nodes, product_name="lookMain"): + """Assigns a look to a node. + + Optimizes the nodes by grouping by folder id and finding + related product by name. 
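A minimal usage sketch, assuming an active AYON context and a published "lookMain" product for the selected asset:

    from maya import cmds

    selection = cmds.ls(selection=True, long=True)
    assign_look(selection, product_name="lookMain")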
+
+    Args:
+        nodes (list): all nodes to assign the look to
+        product_name (str): name of the product to find
+    """
+
+    # Group all nodes per folder id
+    grouped = defaultdict(list)
+    for node in nodes:
+        hash_id = get_id(node)
+        if not hash_id:
+            continue
+
+        parts = hash_id.split(":", 1)
+        grouped[parts[0]].append(node)
+
+    project_name = get_current_project_name()
+    product_entities = ayon_api.get_products(
+        project_name, product_names=[product_name], folder_ids=grouped.keys()
+    )
+    product_entities_by_folder_id = {
+        product_entity["folderId"]: product_entity
+        for product_entity in product_entities
+    }
+    product_ids = {
+        product_entity["id"]
+        for product_entity in product_entities_by_folder_id.values()
+    }
+    last_version_entities_by_product_id = ayon_api.get_last_versions(
+        project_name,
+        product_ids
+    )
+
+    for folder_id, asset_nodes in grouped.items():
+        product_entity = product_entities_by_folder_id.get(folder_id)
+        if not product_entity:
+            log.warning((
+                "No product '{}' found for {}"
+            ).format(product_name, folder_id))
+            continue
+
+        product_id = product_entity["id"]
+        last_version = last_version_entities_by_product_id.get(product_id)
+        if not last_version:
+            log.warning((
+                "No last version found for product '{}' on folder with id {}"
+            ).format(product_name, folder_id))
+            continue
+
+        families = last_version.get("attrib", {}).get("families") or []
+        if "look" not in families:
+            log.warning((
+                "Last version for product '{}' on folder with id {}"
+                " does not have look product type"
+            ).format(product_name, folder_id))
+            continue
+
+        log.debug("Assigning look '{}' version {}".format(
+            product_name, last_version["version"]))
+
+        assign_look_by_version(asset_nodes, last_version["id"])
+
+
+def apply_shaders(relationships, shadernodes, nodes):
+    """Link shadingEngine to the right nodes based on relationship data
+
+    Relationship data is constructed of a collection of `sets` and
+    `attributes`. `sets` corresponds with the shadingEngines found in the
+    lookdev. Each set has the keys `name`, `members` and `uuid`; the
+    `members` hold a collection of node information `name` and `uuid`.
+
+    Args:
+        relationships (dict): relationship data
+        shadernodes (list): list of nodes of the shading objectSets (includes
+            VRayObjectProperties and shadingEngines)
+        nodes (list): list of nodes to apply shader to
+
+    Returns:
+        None
+    """
+
+    attributes = relationships.get("attributes", [])
+    shader_data = relationships.get("relationships", {})
+
+    shading_engines = cmds.ls(shadernodes, type="objectSet", long=True)
+    assert shading_engines, "Error in retrieving objectSets from reference"
+
+    # region compute lookup
+    nodes_by_id = defaultdict(list)
+    for node in nodes:
+        nodes_by_id[get_id(node)].append(node)
+
+    shading_engines_by_id = defaultdict(list)
+    for shad in shading_engines:
+        shading_engines_by_id[get_id(shad)].append(shad)
+    # endregion
+
+    # region assign shading engines and other sets
+    for data in shader_data.values():
+        # collect all unique IDs of the set members
+        shader_uuid = data["uuid"]
+        member_uuids = [member["uuid"] for member in data["members"]]
+
+        filtered_nodes = list()
+        for m_uuid in member_uuids:
+            filtered_nodes.extend(nodes_by_id[m_uuid])
+
+        id_shading_engines = shading_engines_by_id[shader_uuid]
+        if not id_shading_engines:
+            log.error("No shader found with cbId "
+                      "'{}'".format(shader_uuid))
+            continue
+        elif len(id_shading_engines) > 1:
+            log.error("Skipping shader assignment. "
+                      "More than one shader found with cbId "
+                      "'{}'. 
(found: {})".format(shader_uuid, + id_shading_engines)) + continue + + if not filtered_nodes: + log.warning("No nodes found for shading engine " + "'{0}'".format(id_shading_engines[0])) + continue + try: + cmds.sets(filtered_nodes, forceElement=id_shading_engines[0]) + except RuntimeError as rte: + log.error("Error during shader assignment: {}".format(rte)) + + # endregion + + apply_attributes(attributes, nodes_by_id) + + +# endregion LOOKDEV +def get_isolate_view_sets(): + """Return isolate view sets of all modelPanels. + + Returns: + list: all sets related to isolate view + + """ + + view_sets = set() + for panel in cmds.getPanel(type="modelPanel") or []: + view_set = cmds.modelEditor(panel, query=True, viewObjects=True) + if view_set: + view_sets.add(view_set) + + return view_sets + + +def get_related_sets(node): + """Return objectSets that are relationships for a look for `node`. + + Filters out based on: + - id attribute is NOT `AVALON_CONTAINER_ID` + - shapes and deformer shapes (alembic creates meshShapeDeformed) + - set name ends with any from a predefined list + - set in not in viewport set (isolate selected for example) + + Args: + node (str): name of the current node to check + + Returns: + list: The related sets + + """ + + sets = cmds.listSets(object=node, extendToShape=False) + if not sets: + return [] + + # Fix 'no object matches name' errors on nodes returned by listSets. + # In rare cases it can happen that a node is added to an internal maya + # set inaccessible by maya commands, for example check some nodes + # returned by `cmds.listSets(allSets=True)` + sets = cmds.ls(sets) + + # Ids to ignore + ignored = { + AVALON_INSTANCE_ID, + AVALON_CONTAINER_ID, + AYON_INSTANCE_ID, + AYON_CONTAINER_ID, + } + + # Ignore `avalon.container` + sets = [ + s for s in sets + if ( + not cmds.attributeQuery("id", node=s, exists=True) + or cmds.getAttr(f"{s}.id") not in ignored + ) + ] + if not sets: + return sets + + # Exclude deformer sets (`type=2` for `maya.cmds.listSets`) + exclude_sets = cmds.listSets(object=node, + extendToShape=False, + type=2) or [] + exclude_sets = set(exclude_sets) # optimize lookup + + # Default nodes to ignore + exclude_sets.update({"defaultLightSet", "defaultObjectSet"}) + + # Filter out the sets to exclude + sets = [s for s in sets if s not in exclude_sets] + + # Ignore when the set has a specific suffix + ignore_suffices = ("out_SET", "controls_SET", "_INST", "_CON") + sets = [s for s in sets if not s.endswith(ignore_suffices)] + if not sets: + return sets + + # Ignore viewport filter view sets (from isolate select and + # viewports) + view_sets = get_isolate_view_sets() + sets = [s for s in sets if s not in view_sets] + + return sets + + +def get_container_transforms(container, members=None, root=False): + """Retrieve the root node of the container content + + When a container is created through a Loader the content + of the file will be grouped under a transform. 
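For reference, a minimal sketch of the relationships data layout that `apply_shaders` consumes, inferred from how the function reads it; all names and ids here are made up:

    relationships = {
        "attributes": [],
        "relationships": {
            "lambert1SG": {
                "uuid": "<folder-id>:9f3c2b1a0d4e",
                "members": [
                    {"name": "pCubeShape1",
                     "uuid": "<folder-id>:1a2b3c4d5e6f"}
                ]
            }
        }
    }
    # apply_shaders(relationships, shading_engine_nodes, target_nodes)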
The name of the root + transform is stored in the container information + + Args: + container (dict): the container + members (list): optional and convenience argument + root (bool): return highest node in hierarchy if True + + Returns: + root (list / str): + """ + + if not members: + members = get_container_members(container) + + results = cmds.ls(members, type="transform", long=True) + if root: + root = get_highest_in_hierarchy(results) + if root: + results = root[0] + + return results + + +def get_highest_in_hierarchy(nodes): + """Return highest nodes in the hierarchy that are in the `nodes` list. + + The "highest in hierarchy" are the nodes closest to world: top-most level. + + Args: + nodes (list): The nodes in which find the highest in hierarchies. + + Returns: + list: The highest nodes from the input nodes. + + """ + + # Ensure we use long names + nodes = cmds.ls(nodes, long=True) + lookup = set(nodes) + + highest = [] + for node in nodes: + # If no parents are within the nodes input list + # then this is a highest node + if not any(n in lookup for n in iter_parents(node)): + highest.append(node) + + return highest + + +def iter_parents(node): + """Iter parents of node from its long name. + + Note: The `node` *must* be the long node name. + + Args: + node (str): Node long name. + + Yields: + str: All parent node names (long names) + + """ + while True: + split = node.rsplit("|", 1) + if len(split) == 1 or not split[0]: + return + + node = split[0] + yield node + + +def remove_other_uv_sets(mesh): + """Remove all other UV sets than the current UV set. + + Keep only current UV set and ensure it's the renamed to default 'map1'. + + """ + + uvSets = cmds.polyUVSet(mesh, query=True, allUVSets=True) + current = cmds.polyUVSet(mesh, query=True, currentUVSet=True)[0] + + # Copy over to map1 + if current != 'map1': + cmds.polyUVSet(mesh, uvSet=current, newUVSet='map1', copy=True) + cmds.polyUVSet(mesh, currentUVSet=True, uvSet='map1') + current = 'map1' + + # Delete all non-current UV sets + deleteUVSets = [uvSet for uvSet in uvSets if uvSet != current] + uvSet = None + + # Maya Bug (tested in 2015/2016): + # In some cases the API's MFnMesh will report less UV sets than + # maya.cmds.polyUVSet. This seems to happen when the deletion of UV sets + # has not triggered a cleanup of the UVSet array attribute on the mesh + # node. It will still have extra entries in the attribute, though it will + # not show up in API or UI. Nevertheless it does show up in + # maya.cmds.polyUVSet. To ensure we clean up the array we'll force delete + # the extra remaining 'indices' that we don't want. + + # TODO: Implement a better fix + # The best way to fix would be to get the UVSet indices from api with + # MFnMesh (to ensure we keep correct ones) and then only force delete the + # other entries in the array attribute on the node. But for now we're + # deleting all entries except first one. Note that the first entry could + # never be removed (the default 'map1' always exists and is supposed to + # be undeletable.) 
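A quick sketch of `get_highest_in_hierarchy` on a small hierarchy:

    from maya import cmds

    a = cmds.group(empty=True, name="A")
    b = cmds.group(empty=True, name="B", parent=a)
    c = cmds.group(empty=True, name="C", parent=b)

    nodes = cmds.ls([a, b, c], long=True)
    get_highest_in_hierarchy(nodes)  # -> ['|A']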
+ try: + for uvSet in deleteUVSets: + cmds.polyUVSet(mesh, delete=True, uvSet=uvSet) + except RuntimeError as exc: + log.warning('Error uvSet: %s - %s', uvSet, exc) + indices = cmds.getAttr('{0}.uvSet'.format(mesh), + multiIndices=True) + if not indices: + log.warning("No uv set found indices for: %s", mesh) + return + + # Delete from end to avoid shifting indices + # and remove the indices in the attribute + indices = reversed(indices[1:]) + for i in indices: + attr = '{0}.uvSet[{1}]'.format(mesh, i) + cmds.removeMultiInstance(attr, b=True) + + +def get_node_parent(node): + """Return full path name for parent of node""" + parents = cmds.listRelatives(node, parent=True, fullPath=True) + return parents[0] if parents else None + + +def get_id_from_sibling(node, history_only=True): + """Return first node id in the history chain that matches this node. + + The nodes in history must be of the exact same node type and must be + parented under the same parent. + + Optionally, if no matching node is found from the history, all the + siblings of the node that are of the same type are checked. + Additionally to having the same parent, the sibling must be marked as + 'intermediate object'. + + Args: + node (str): node to retrieve the history from + history_only (bool): if True and if nothing found in history, + look for an 'intermediate object' in all the node's siblings + of same type + + Returns: + str or None: The id from the sibling node or None when no id found + on any valid nodes in the history or siblings. + + """ + + node = cmds.ls(node, long=True)[0] + + # Find all similar nodes in history + history = cmds.listHistory(node) + node_type = cmds.nodeType(node) + similar_nodes = cmds.ls(history, exactType=node_type, long=True) + + # Exclude itself + similar_nodes = [x for x in similar_nodes if x != node] + + # The node *must be* under the same parent + parent = get_node_parent(node) + similar_nodes = [i for i in similar_nodes if get_node_parent(i) == parent] + + # Check all of the remaining similar nodes and take the first one + # with an id and assume it's the original. + for similar_node in similar_nodes: + _id = get_id(similar_node) + if _id: + return _id + + if not history_only: + # Get siblings of same type + similar_nodes = cmds.listRelatives(parent, + type=node_type, + fullPath=True) + similar_nodes = cmds.ls(similar_nodes, exactType=node_type, long=True) + + # Exclude itself + similar_nodes = [x for x in similar_nodes if x != node] + + # Get all unique ids from siblings in order since + # we consistently take the first one found + sibling_ids = OrderedDict() + for similar_node in similar_nodes: + # Check if "intermediate object" + if not cmds.getAttr(similar_node + ".intermediateObject"): + continue + + _id = get_id(similar_node) + if not _id: + continue + + if _id in sibling_ids: + sibling_ids[_id].append(similar_node) + else: + sibling_ids[_id] = [similar_node] + + if sibling_ids: + first_id, found_nodes = next(iter(sibling_ids.items())) + + # Log a warning if we've found multiple unique ids + if len(sibling_ids) > 1: + log.warning(("Found more than 1 intermediate shape with" + " unique id for '{}'. 
Using id of first" + " found: '{}'".format(node, found_nodes[0]))) + + return first_id + + +def set_scene_fps(fps, update=True): + """Set FPS from project configuration + + Args: + fps (int, float): desired FPS + update(bool): toggle update animation, default is True + + Returns: + None + + """ + + fps_mapping = { + '2': '2fps', + '3': '3fps', + '4': '4fps', + '5': '5fps', + '6': '6fps', + '8': '8fps', + '10': '10fps', + '12': '12fps', + '15': 'game', + '16': '16fps', + '24': 'film', + '25': 'pal', + '30': 'ntsc', + '48': 'show', + '50': 'palf', + '60': 'ntscf', + '23.976023976023978': '23.976fps', + '29.97002997002997': '29.97fps', + '47.952047952047955': '47.952fps', + '59.94005994005994': '59.94fps', + '44100': '44100fps', + '48000': '48000fps' + } + + unit = fps_mapping.get(str(convert_to_maya_fps(fps)), None) + if unit is None: + raise ValueError("Unsupported FPS value: `%s`" % fps) + + # Get time slider current state + start_frame = cmds.playbackOptions(query=True, minTime=True) + end_frame = cmds.playbackOptions(query=True, maxTime=True) + + # Get animation data + animation_start = cmds.playbackOptions(query=True, animationStartTime=True) + animation_end = cmds.playbackOptions(query=True, animationEndTime=True) + + current_frame = cmds.currentTime(query=True) + + log.info("Setting scene FPS to: '{}'".format(unit)) + cmds.currentUnit(time=unit, updateAnimation=update) + + # Set time slider data back to previous state + cmds.playbackOptions(minTime=start_frame, + maxTime=end_frame, + animationStartTime=animation_start, + animationEndTime=animation_end) + + cmds.currentTime(current_frame, edit=True, update=True) + + # Force file stated to 'modified' + cmds.file(modified=True) + + +def set_scene_resolution(width, height, pixelAspect): + """Set the render resolution + + Args: + width(int): value of the width + height(int): value of the height + + Returns: + None + + """ + + control_node = "defaultResolution" + current_renderer = cmds.getAttr("defaultRenderGlobals.currentRenderer") + aspect_ratio_attr = "deviceAspectRatio" + + # Give VRay a helping hand as it is slightly different from the rest + if current_renderer == "vray": + aspect_ratio_attr = "aspectRatio" + vray_node = "vraySettings" + if cmds.objExists(vray_node): + control_node = vray_node + else: + log.error("Can't set VRay resolution because there is no node " + "named: `%s`" % vray_node) + + log.info("Setting scene resolution to: %s x %s" % (width, height)) + cmds.setAttr("%s.width" % control_node, width) + cmds.setAttr("%s.height" % control_node, height) + + deviceAspectRatio = ((float(width) / float(height)) * float(pixelAspect)) + cmds.setAttr( + "{}.{}".format(control_node, aspect_ratio_attr), deviceAspectRatio) + cmds.setAttr("%s.pixelAspect" % control_node, pixelAspect) + + +def get_fps_for_current_context(): + """Get fps that should be set for current context. + + Todos: + - Skip project value. + - Merge logic with 'get_frame_range' and 'reset_scene_resolution' -> + all the values in the functions can be collected at one place as + they have same requirements. + + Returns: + Union[int, float]: FPS value. 
+ """ + task_entity = get_current_task_entity(fields={"attrib"}) + fps = task_entity.get("attrib", {}).get("fps") + if not fps: + project_name = get_current_project_name() + folder_path = get_current_folder_path() + folder_entity = ayon_api.get_folder_by_path( + project_name, folder_path, fields={"attrib.fps"} + ) or {} + + fps = folder_entity.get("attrib", {}).get("fps") + if not fps: + project_entity = ayon_api.get_project( + project_name, fields=["attrib.fps"] + ) or {} + fps = project_entity.get("attrib", {}).get("fps") + + if not fps: + fps = 25 + + return convert_to_maya_fps(fps) + + +def get_frame_range(include_animation_range=False): + """Get the current task frame range and handles. + + Args: + include_animation_range (bool, optional): Whether to include + `animationStart` and `animationEnd` keys to define the outer + range of the timeline. It is excluded by default. + + Returns: + dict: Task's expected frame range values. + + """ + + # Set frame start/end + project_name = get_current_project_name() + folder_path = get_current_folder_path() + task_name = get_current_task_name() + + folder_entity = ayon_api.get_folder_by_path( + project_name, + folder_path, + fields={"id"}) + task_entity = ayon_api.get_task_by_name( + project_name, folder_entity["id"], task_name + ) + + task_attributes = task_entity["attrib"] + + frame_start = task_attributes.get("frameStart") + frame_end = task_attributes.get("frameEnd") + + if frame_start is None or frame_end is None: + cmds.warning("No edit information found for '{}'".format(folder_path)) + return + + handle_start = task_attributes.get("handleStart") or 0 + handle_end = task_attributes.get("handleEnd") or 0 + + frame_range = { + "frameStart": frame_start, + "frameEnd": frame_end, + "handleStart": handle_start, + "handleEnd": handle_end + } + if include_animation_range: + # The animation range values are only included to define whether + # the Maya time slider should include the handles or not. + # Some usages of this function use the full dictionary to define + # instance attributes for which we want to exclude the animation + # keys. That is why these are excluded by default. + + settings = get_project_settings(project_name) + + task_type = task_entity["taskType"] + + include_handles_settings = settings["maya"]["include_handles"] + + animation_start = frame_start + animation_end = frame_end + + include_handles = include_handles_settings["include_handles_default"] + for item in include_handles_settings["per_task_type"]: + if task_type in item["task_type"]: + include_handles = item["include_handles"] + break + if include_handles: + animation_start -= int(handle_start) + animation_end += int(handle_end) + + frame_range["animationStart"] = animation_start + frame_range["animationEnd"] = animation_end + + return frame_range + + +def reset_frame_range(playback=True, render=True, fps=True): + """Set frame range to current folder. + + Args: + playback (bool, Optional): Whether to set the maya timeline playback + frame range. Defaults to True. + render (bool, Optional): Whether to set the maya render frame range. + Defaults to True. + fps (bool, Optional): Whether to set scene FPS. Defaults to True. 
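A sketch that mirrors what `reset_frame_range` does with this data for the time slider:

    from maya import cmds

    frame_range = get_frame_range(include_animation_range=True)
    if frame_range:
        cmds.playbackOptions(
            minTime=frame_range["frameStart"],
            maxTime=frame_range["frameEnd"],
            animationStartTime=frame_range["animationStart"],
            animationEndTime=frame_range["animationEnd"]
        )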
+ """ + if fps: + set_scene_fps(get_fps_for_current_context()) + + frame_range = get_frame_range(include_animation_range=True) + if not frame_range: + # No frame range data found for folder + return + + frame_start = frame_range["frameStart"] + frame_end = frame_range["frameEnd"] + animation_start = frame_range["animationStart"] + animation_end = frame_range["animationEnd"] + + if playback: + cmds.playbackOptions( + minTime=frame_start, + maxTime=frame_end, + animationStartTime=animation_start, + animationEndTime=animation_end + ) + cmds.currentTime(frame_start) + + if render: + cmds.setAttr("defaultRenderGlobals.startFrame", animation_start) + cmds.setAttr("defaultRenderGlobals.endFrame", animation_end) + + +def reset_scene_resolution(): + """Apply the scene resolution from the project definition + + The scene resolution will be retrieved from the current task entity's + attributes. + + Returns: + None + """ + + task_attributes = get_current_task_entity(fields={"attrib"})["attrib"] + + # Set resolution + width = task_attributes.get("resolutionWidth", 1920) + height = task_attributes.get("resolutionHeight", 1080) + pixel_aspect = task_attributes.get("pixelAspect", 1) + + set_scene_resolution(width, height, pixel_aspect) + + +def set_context_settings( + fps=True, + resolution=True, + frame_range=True, + colorspace=True +): + """Apply the project settings from the project definition + + Settings can be overwritten by an asset if the asset.data contains + any information regarding those settings. + + Args: + fps (bool): Whether to set the scene FPS. + resolution (bool): Whether to set the render resolution. + frame_range (bool): Whether to reset the time slide frame ranges. + colorspace (bool): Whether to reset the colorspace. + + Returns: + None + + """ + if fps: + # Set project fps + set_scene_fps(get_fps_for_current_context()) + + if resolution: + reset_scene_resolution() + + # Set frame range. + if frame_range: + reset_frame_range(fps=False) + + # Set colorspace + if colorspace: + set_colorspace() + + +def prompt_reset_context(): + """Prompt the user what context settings to reset. + This prompt is used on saving to a different task to allow the scene to + get matched to the new context. + """ + # TODO: Cleanup this prototyped mess of imports and odd dialog + from ayon_core.tools.attribute_defs.dialog import ( + AttributeDefinitionsDialog + ) + from ayon_core.style import load_stylesheet + from ayon_core.lib import BoolDef, UILabelDef + + definitions = [ + UILabelDef( + label=( + "You are saving your workfile into a different folder or task." 
+                "\n\n"
+                "Would you like to update some settings to the new context?\n"
+            )
+        ),
+        BoolDef(
+            "fps",
+            label="FPS",
+            tooltip="Reset workfile FPS",
+            default=True
+        ),
+        BoolDef(
+            "frame_range",
+            label="Frame Range",
+            tooltip="Reset workfile start and end frame ranges",
+            default=True
+        ),
+        BoolDef(
+            "resolution",
+            label="Resolution",
+            tooltip="Reset workfile resolution",
+            default=True
+        ),
+        BoolDef(
+            "colorspace",
+            label="Colorspace",
+            tooltip="Reset workfile colorspace",
+            default=True
+        ),
+        BoolDef(
+            "instances",
+            label="Publish instances",
+            tooltip="Update all publish instance's folder and task to match "
+                    "the new folder and task",
+            default=True
+        ),
+    ]
+
+    dialog = AttributeDefinitionsDialog(definitions)
+    dialog.setWindowTitle("Saving to different context.")
+    dialog.setStyleSheet(load_stylesheet())
+    if not dialog.exec_():
+        return None
+
+    options = dialog.get_values()
+    with suspended_refresh():
+        set_context_settings(
+            fps=options["fps"],
+            resolution=options["resolution"],
+            frame_range=options["frame_range"],
+            colorspace=options["colorspace"]
+        )
+        if options["instances"]:
+            update_content_on_context_change()
+
+    dialog.deleteLater()
+
+
+# Valid FPS
+def validate_fps():
+    """Validate current scene FPS and show pop-up when it is incorrect
+
+    Returns:
+        bool
+
+    """
+
+    expected_fps = get_fps_for_current_context()
+    current_fps = mel.eval("currentTimeUnitToFPS()")
+
+    fps_match = current_fps == expected_fps
+    if not fps_match and not IS_HEADLESS:
+        from ayon_core.tools.utils import PopupUpdateKeys
+
+        parent = get_main_window()
+
+        dialog = PopupUpdateKeys(parent=parent)
+        dialog.setModal(True)
+        dialog.setWindowTitle("Maya scene does not match project FPS")
+        dialog.set_message(
+            "Scene {} FPS does not match project {} FPS".format(
+                current_fps, expected_fps
+            )
+        )
+        dialog.set_button_text("Fix")
+
+        # Set new text for button (add optional argument for the popup?)
+        def on_click(update):
+            set_scene_fps(expected_fps, update)
+
+        dialog.on_clicked_state.connect(on_click)
+        dialog.show()
+
+        return False
+
+    return fps_match
+
+
+def bake(nodes,
+         frame_range=None,
+         step=1.0,
+         simulation=True,
+         preserve_outside_keys=False,
+         disable_implicit_control=True,
+         shape=True):
+    """Bake the given nodes over the time range.
+
+    This will bake all attributes of the node, including custom attributes.
+
+    Args:
+        nodes (list): Names of transform nodes, eg. camera, light.
+        frame_range (list): frame range with start and end frame, or if None
+            the time slider playback range is used.
+        simulation (bool): Whether to perform a full simulation of the
+            attributes over time.
+        preserve_outside_keys (bool): Keep keys that are outside of the baked
+            range.
+        disable_implicit_control (bool): When True will disable any
+            constraints to the object.
+        shape (bool): When True also bake attributes on the children shapes.
+        step (float): The step size to sample by.
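A minimal usage sketch for `bake`, baking a keyed camera over an explicit range:

    from maya import cmds

    cam = cmds.camera(name="shotCam")[0]
    cmds.setKeyframe(cam + ".translateX", time=1, value=0)
    cmds.setKeyframe(cam + ".translateX", time=10, value=10)

    # Samples every frame of the explicit range; custom attributes on the
    # transform are baked as well.
    bake([cam], frame_range=[1, 10], step=1.0, simulation=False)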
+ + Returns: + None + + """ + + # Parse inputs + if not nodes: + return + + assert isinstance(nodes, (list, tuple)), "Nodes must be a list or tuple" + + # If frame range is None fall back to time slider playback time range + if frame_range is None: + frame_range = [cmds.playbackOptions(query=True, minTime=True), + cmds.playbackOptions(query=True, maxTime=True)] + + # If frame range is single frame bake one frame more, + # otherwise maya.cmds.bakeResults gets confused + if frame_range[1] == frame_range[0]: + frame_range[1] += 1 + + # Bake it + with keytangent_default(in_tangent_type='auto', + out_tangent_type='auto'): + cmds.bakeResults(nodes, + simulation=simulation, + preserveOutsideKeys=preserve_outside_keys, + disableImplicitControl=disable_implicit_control, + shape=shape, + sampleBy=step, + time=(frame_range[0], frame_range[1])) + + +def bake_to_world_space(nodes, + frame_range=None, + simulation=True, + preserve_outside_keys=False, + disable_implicit_control=True, + shape=True, + step=1.0): + """Bake the nodes to world space transformation (incl. other attributes) + + Bakes the transforms to world space (while maintaining all its animated + attributes and settings) by duplicating the node. Then parents it to world + and constrains to the original. + + Other attributes are also baked by connecting all attributes directly. + Baking is then done using Maya's bakeResults command. + + See `bake` for the argument documentation. + + Returns: + list: The newly created and baked node names. + + """ + @contextlib.contextmanager + def _unlock_attr(attr): + """Unlock attribute during context if it is locked""" + if not cmds.getAttr(attr, lock=True): + # If not locked, do nothing + yield + return + try: + cmds.setAttr(attr, lock=False) + yield + finally: + cmds.setAttr(attr, lock=True) + + def _get_attrs(node): + """Workaround for buggy shape attribute listing with listAttr + + This will only return keyable settable attributes that have an + incoming connections (those that have a reason to be baked). + + Technically this *may* fail to return attributes driven by complex + expressions for which maya makes no connections, e.g. doing actual + `setAttr` calls in expressions. + + Arguments: + node (str): The node to list attributes for. + + Returns: + list: Keyable attributes with incoming connections. + The attribute may be locked. + + """ + attrs = cmds.listAttr(node, + write=True, + scalar=True, + settable=True, + connectable=True, + keyable=True, + shortNames=True) or [] + valid_attrs = [] + for attr in attrs: + node_attr = '{0}.{1}'.format(node, attr) + + # Sometimes Maya returns 'non-existent' attributes for shapes + # so we filter those out + if not cmds.attributeQuery(attr, node=node, exists=True): + continue + + # We only need those that have a connection, just to be safe + # that it's actually keyable/connectable anyway. 
+ if cmds.connectionInfo(node_attr, + isDestination=True): + valid_attrs.append(attr) + + return valid_attrs + + transform_attrs = {"t", "r", "s", + "tx", "ty", "tz", + "rx", "ry", "rz", + "sx", "sy", "sz"} + + world_space_nodes = [] + with ExitStack() as stack: + delete_bin = stack.enter_context(delete_after()) + # Create the duplicate nodes that are in world-space connected to + # the originals + for node in nodes: + + # Duplicate the node + short_name = node.rsplit("|", 1)[-1] + new_name = "{0}_baked".format(short_name) + new_node = cmds.duplicate(node, + name=new_name, + renameChildren=True)[0] # noqa + + # Parent new node to world + if cmds.listRelatives(new_node, parent=True): + new_node = cmds.parent(new_node, world=True)[0] + + # Temporarily unlock and passthrough connect all attributes + # so we can bake them over time + # Skip transform attributes because we will constrain them later + attrs = set(_get_attrs(node)) - transform_attrs + for attr in attrs: + orig_node_attr = "{}.{}".format(node, attr) + new_node_attr = "{}.{}".format(new_node, attr) + + # unlock during context to avoid connection errors + stack.enter_context(_unlock_attr(new_node_attr)) + cmds.connectAttr(orig_node_attr, + new_node_attr, + force=True) + + # If shapes are also baked then also temporarily unlock and + # passthrough connect all shape attributes for baking + if shape: + children_shapes = cmds.listRelatives(new_node, + children=True, + fullPath=True, + shapes=True) + if children_shapes: + orig_children_shapes = cmds.listRelatives(node, + children=True, + fullPath=True, + shapes=True) + for orig_shape, new_shape in zip(orig_children_shapes, + children_shapes): + attrs = _get_attrs(orig_shape) + for attr in attrs: + orig_node_attr = "{}.{}".format(orig_shape, attr) + new_node_attr = "{}.{}".format(new_shape, attr) + + # unlock during context to avoid connection errors + stack.enter_context(_unlock_attr(new_node_attr)) + cmds.connectAttr(orig_node_attr, + new_node_attr, + force=True) + + # Constraint transforms + for attr in transform_attrs: + transform_attr = "{}.{}".format(new_node, attr) + stack.enter_context(_unlock_attr(transform_attr)) + delete_bin.extend(cmds.parentConstraint(node, new_node, mo=False)) + delete_bin.extend(cmds.scaleConstraint(node, new_node, mo=False)) + + world_space_nodes.append(new_node) + + bake(world_space_nodes, + frame_range=frame_range, + step=step, + simulation=simulation, + preserve_outside_keys=preserve_outside_keys, + disable_implicit_control=disable_implicit_control, + shape=shape) + + return world_space_nodes + + +def load_capture_preset(data): + """Convert AYON Extract Playblast settings to `capture` arguments + + Input data is the settings from: + `project_settings/maya/publish/ExtractPlayblast/capture_preset` + + Args: + data (dict): Capture preset settings from AYON settings + + Returns: + dict: `capture.capture` compatible keyword arguments + + """ + + options = dict() + viewport_options = dict() + viewport2_options = dict() + camera_options = dict() + + # Straight key-value match from settings to capture arguments + options.update(data["Codec"]) + options.update(data["Generic"]) + options.update(data["Resolution"]) + + camera_options.update(data["CameraOptions"]) + viewport_options.update(data["Renderer"]) + + # DISPLAY OPTIONS + disp_options = {} + for key, value in data["DisplayOptions"].items(): + if key.startswith("background"): + # Convert background, backgroundTop, backgroundBottom colors + + if len(value) == 4: + # Ignore alpha + convert RGB to float + 
value = [
+                    float(value[0]) / 255,
+                    float(value[1]) / 255,
+                    float(value[2]) / 255
+                ]
+            disp_options[key] = value
+        elif key == "displayGradient":
+            disp_options[key] = value
+
+    options["display_options"] = disp_options
+
+    # Viewport Options has a mixture of Viewport2 Options and Viewport Options
+    # to pass along to capture. So we'll need to differentiate between the two
+    VIEWPORT2_OPTIONS = {
+        "textureMaxResolution",
+        "renderDepthOfField",
+        "ssaoEnable",
+        "ssaoSamples",
+        "ssaoAmount",
+        "ssaoRadius",
+        "ssaoFilterRadius",
+        "hwFogStart",
+        "hwFogEnd",
+        "hwFogAlpha",
+        "hwFogFalloff",
+        "hwFogColorR",
+        "hwFogColorG",
+        "hwFogColorB",
+        "hwFogDensity",
+        "motionBlurEnable",
+        "motionBlurSampleCount",
+        "motionBlurShutterOpenFraction",
+        "lineAAEnable"
+    }
+    for key, value in data["ViewportOptions"].items():
+
+        # There are some keys we want to ignore
+        if key in {"override_viewport_options", "high_quality"}:
+            continue
+
+        # First handle special cases where we do value conversion to
+        # separate option values
+        if key == 'textureMaxResolution':
+            viewport2_options['textureMaxResolution'] = value
+            if value > 0:
+                viewport2_options['enableTextureMaxRes'] = True
+                viewport2_options['textureMaxResMode'] = 1
+            else:
+                viewport2_options['enableTextureMaxRes'] = False
+                viewport2_options['textureMaxResMode'] = 0
+
+        elif key == 'multiSample':
+            viewport2_options['multiSampleEnable'] = value > 0
+            viewport2_options['multiSampleCount'] = value
+
+        elif key == 'alphaCut':
+            viewport2_options['transparencyAlgorithm'] = 5
+            viewport2_options['transparencyQuality'] = 1
+
+        elif key == 'hwFogFalloff':
+            # Convert the settings enum value string to an integer
+            viewport2_options['hwFogFalloff'] = int(value)
+
+        # Then handle Viewport 2.0 Options
+        elif key in VIEWPORT2_OPTIONS:
+            viewport2_options[key] = value
+
+        # Then assume remainder is Viewport Options
+        else:
+            viewport_options[key] = value
+
+    options['viewport_options'] = viewport_options
+    options['viewport2_options'] = viewport2_options
+    options['camera_options'] = camera_options
+
+    # use active sound track
+    scene = capture.parse_active_scene()
+    options['sound'] = scene['sound']
+
+    return options
+
+
+def get_attr_in_layer(attr, layer, as_string=True):
+    """Return attribute value in specified renderlayer.
+
+    Same as cmds.getAttr but this gets the attribute's value in a
+    given render layer without having to switch to it.
+
+    Warning for parent attribute overrides:
+        Attributes that have render layer overrides to their parent attribute
+        are not captured correctly since they do not have a direct connection.
+        For example, an override to sphere.rotate when querying sphere.rotateX
+        will not return correctly!
+
+    Note: This is much faster for Maya's legacy renderLayer system; for
+        Render Setup the query is delegated to `lib_rendersetup` which is
+        not as optimized.
+
+    Args:
+        attr (str): attribute name, ex. "node.attribute"
+        layer (str): layer name
+        as_string (bool): whether attribute should convert to a string value
+
+    Returns:
+        The return value from `maya.cmds.getAttr`
+
+    """
+
+    try:
+        if cmds.mayaHasRenderSetup():
+            from . import lib_rendersetup
+            return lib_rendersetup.get_attr_in_layer(
+                attr, layer, as_string=as_string)
+    except AttributeError:
+        pass
+
+    # Ignore complex query if we're in the layer anyway
+    current_layer = cmds.editRenderLayerGlobals(query=True,
+                                                currentRenderLayer=True)
+    if layer == current_layer:
+        return cmds.getAttr(attr, asString=as_string)
+
+    connections = cmds.listConnections(attr,
+                                       plugs=True,
+                                       source=False,
+                                       destination=True,
+                                       type="renderLayer") or []
+    # Note: `filter()` returns an (always truthy) iterator in Python 3 so
+    # collect into a list before the emptiness check below.
+    connections = [conn for conn in connections if conn.endswith(".plug")]
+    if not connections:
+        return cmds.getAttr(attr)
+
+    # Some value types perform a conversion when assigning
+    # TODO: See if there's a maya method to allow this conversion
+    # instead of computing it ourselves.
+    attr_type = cmds.getAttr(attr, type=True)
+    conversion = None
+    if attr_type == "time":
+        conversion = mel.eval('currentTimeUnitToFPS()')  # returns float
+    elif attr_type == "doubleAngle":
+        # Radians to Degrees: 180 / pi
+        # TODO: This will likely only be correct when Maya units are set
+        # to degrees
+        conversion = 57.2957795131
+    elif attr_type == "doubleLinear":
+        raise NotImplementedError("doubleLinear conversion not implemented.")
+
+    for connection in connections:
+        if connection.startswith(layer + "."):
+            attr_split = connection.split(".")
+            if attr_split[0] == layer:
+                attr = ".".join(attr_split[0:-1])
+                value = cmds.getAttr("%s.value" % attr)
+                if conversion:
+                    value *= conversion
+                return value
+
+    else:
+        # When connections are present, but none to the specific
+        # renderlayer, then the layer should have the
+        # "defaultRenderLayer"'s value
+        layer = "defaultRenderLayer"
+        for connection in connections:
+            if connection.startswith(layer):
+                attr_split = connection.split(".")
+                if attr_split[0] == "defaultRenderLayer":
+                    attr = ".".join(attr_split[0:-1])
+                    value = cmds.getAttr("%s.value" % attr)
+                    if conversion:
+                        value *= conversion
+                    return value
+
+    return cmds.getAttr(attr, asString=as_string)
+
+
+def fix_incompatible_containers():
+    """Backwards compatibility: old containers to use new ReferenceLoader"""
+    old_loaders = {
+        "MayaAsciiLoader",
+        "AbcLoader",
+        "ModelLoader",
+        "CameraLoader",
+        "RigLoader",
+        "FBXLoader"
+    }
+    host = registered_host()
+    for container in host.ls():
+        loader = container['loader']
+        if loader in old_loaders:
+            log.info(
+                "Converting legacy container loader {} to "
+                "ReferenceLoader: {}".format(loader, container["objectName"])
+            )
+            cmds.setAttr(container["objectName"] + ".loader",
+                         "ReferenceLoader", type="string")
+
+
+def update_content_on_context_change():
+    """Update scene content to match the new folder on context change."""
+
+    host = registered_host()
+    create_context = CreateContext(host)
+    folder_entity = get_current_task_entity(fields={"attrib"})
+
+    instance_values = {
+        "folderPath": create_context.get_current_folder_path(),
+        "task": create_context.get_current_task_name(),
+    }
+    creator_attribute_values = {
+        "frameStart": folder_entity["attrib"]["frameStart"],
+        "frameEnd": folder_entity["attrib"]["frameEnd"],
+    }
+
+    has_changes = False
+    for instance in create_context.instances:
+        for key, value in instance_values.items():
+            if key not in instance or instance[key] == value:
+                continue
+
+            # Update instance value
+            print(f"Updating {instance.product_name} {key} to: {value}")
+            instance[key] = value
+            has_changes = True
+
+        creator_attributes = instance.creator_attributes
+        for key, value in creator_attribute_values.items():
+            if (
+                key not in creator_attributes
+                or creator_attributes[key] == value
+            ):
+                continue
+
+            # Update instance creator attribute value
+            print(f"Updating {instance.product_name} {key} to: {value}")
+            creator_attributes[key] = value
+            has_changes = True
+
+    if has_changes:
+        create_context.save_changes()
+
+
+def show_message(title, msg):
+    from qtpy import QtWidgets
+    from ayon_core.tools.utils import show_message_dialog
+
+    # Find maya main window
+    top_level_widgets = {w.objectName(): w for w in
+                         QtWidgets.QApplication.topLevelWidgets()}
+
+    parent = top_level_widgets.get("MayaWindow", None)
+    if parent is not None:
+        show_message_dialog(title=title, message=msg, parent=parent)
+
+
+def iter_shader_edits(relationships, shader_nodes, nodes_by_id, label=None):
+    """Yield edits as a set of actions."""
+
+    attributes = relationships.get("attributes", [])
+    shader_data = relationships.get("relationships", {})
+
+    shading_engines = cmds.ls(shader_nodes, type="objectSet", long=True)
+    assert shading_engines, "Error in retrieving objectSets from reference"
+
+    # region compute lookup
+    shading_engines_by_id = defaultdict(list)
+    for shad in shading_engines:
+        shading_engines_by_id[get_id(shad)].append(shad)
+    # endregion
+
+    # region assign shading engines and other sets
+    for data in shader_data.values():
+        # collect all unique IDs of the set members
+        shader_uuid = data["uuid"]
+        member_uuids = [
+            (member["uuid"], member.get("components"))
+            for member in data["members"]]
+
+        filtered_nodes = list()
+        for _uuid, components in member_uuids:
+            nodes = nodes_by_id.get(_uuid, None)
+            if nodes is None:
+                continue
+
+            if components:
+                # Assign to the components
+                nodes = [".".join([node, components]) for node in nodes]
+
+            filtered_nodes.extend(nodes)
+
+        id_shading_engines = shading_engines_by_id[shader_uuid]
+        if not id_shading_engines:
+            log.error("{} - No shader found with cbId "
+                      "'{}'".format(label, shader_uuid))
+            continue
+        elif len(id_shading_engines) > 1:
+            log.error("{} - Skipping shader assignment. "
+                      "More than one shader found with cbId "
+                      "'{}'. (found: {})".format(label, shader_uuid,
+                                                 id_shading_engines))
+            continue
+
+        if not filtered_nodes:
+            log.warning("{} - No nodes found for shading engine "
+                        "'{}'".format(label, id_shading_engines[0]))
+            continue
+
+        yield {"action": "assign",
+               "uuid": data["uuid"],
+               "nodes": filtered_nodes,
+               "shader": id_shading_engines[0]}
+
+    for data in attributes:
+        nodes = nodes_by_id.get(data["uuid"], [])
+        attr_value = data["attributes"]
+        yield {"action": "setattr",
+               "uuid": data["uuid"],
+               "nodes": nodes,
+               "attributes": attr_value}
+
+
+def set_colorspace():
+    """Set Colorspace from project configuration"""
+
+    project_name = get_current_project_name()
+    imageio = get_project_settings(project_name)["maya"]["imageio"]
+
+    # ocio compatibility variables
+    ocio_v2_maya_version = 2022
+    maya_version = int(cmds.about(version=True))
+    ocio_v2_support = use_ocio_v2 = maya_version >= ocio_v2_maya_version
+    is_ocio_set = bool(os.environ.get("OCIO"))
+
+    use_workfile_settings = imageio.get("workfile", {}).get("enabled")
+    if use_workfile_settings:
+        root_dict = imageio["workfile"]
+    else:
+        # TODO: deprecated code from 3.15.5 - remove
+        # Maya 2022+ introduces new OCIO v2 color management settings that
+        # can override the old color management preferences. AYON has
+        # separate settings for both so we fall back when necessary.
+        use_ocio_v2 = imageio["colorManagementPreference_v2"]["enabled"]
+        if use_ocio_v2 and not ocio_v2_support:
+            # Fallback to legacy behavior with a warning
+            use_ocio_v2 = False
+            log.warning(
+                "Color Management Preference v2 is enabled but not "
+                "supported by current Maya version: {} (< {}). Falling "
+                "back to legacy settings.".format(
+                    maya_version, ocio_v2_maya_version)
+            )
+
+        if use_ocio_v2:
+            root_dict = imageio["colorManagementPreference_v2"]
+        else:
+            root_dict = imageio["colorManagementPreference"]
+
+    if not isinstance(root_dict, dict):
+        msg = "set_colorspace(): colorspace settings must be a dictionary"
+        log.error(msg)
+        return
+
+    # backward compatibility
+    # TODO: deprecated code from 3.15.5 - remove with deprecated code above
+    view_name = root_dict.get("viewTransform")
+    if view_name is None:
+        view_name = root_dict.get("viewName")
+
+    log.debug(">> root_dict: {}".format(pformat(root_dict)))
+    if not root_dict:
+        return
+
+    # set color spaces for rendering space and view transforms
+    def _colormanage(**kwargs):
+        """Wrapper around `cmds.colorManagementPrefs`.
+
+        This logs errors instead of raising an error so color management
+        settings get applied as much as possible.
+
+        """
+        assert len(kwargs) == 1, "Must receive one keyword argument"
+        try:
+            cmds.colorManagementPrefs(edit=True, **kwargs)
+            log.debug("Setting Color Management Preference: {}".format(kwargs))
+        except RuntimeError as exc:
+            log.error(exc)
+
+    # enable color management
+    cmds.colorManagementPrefs(edit=True, cmEnabled=True)
+    cmds.colorManagementPrefs(edit=True, ocioRulesEnabled=True)
+
+    if use_ocio_v2:
+        log.info("Using Maya OCIO v2")
+        if not is_ocio_set:
+            # Set the Maya 2022+ default OCIO v2 config file path
+            log.info("Setting default Maya OCIO v2 config")
+            # Note: Setting "" as value also sets this default however
+            # introduces a bug where launching a file on startup will prompt
+            # to save the empty scene before it, so we set using the path.
+ # This value has been the same for 2022, 2023 and 2024 + path = "/OCIO-configs/Maya2022-default/config.ocio" + cmds.colorManagementPrefs(edit=True, configFilePath=path) + + # set rendering space and view transform + _colormanage(renderingSpaceName=root_dict["renderSpace"]) + _colormanage(viewName=view_name) + _colormanage(displayName=root_dict["displayName"]) + else: + log.info("Using Maya OCIO v1 (legacy)") + if not is_ocio_set: + # Set the Maya default config file path + log.info("Setting default Maya OCIO v1 legacy config") + cmds.colorManagementPrefs(edit=True, configFilePath="legacy") + + # set rendering space and view transform + _colormanage(renderingSpaceName=root_dict["renderSpace"]) + _colormanage(viewTransformName=view_name) + + +@contextlib.contextmanager +def parent_nodes(nodes, parent=None): + # type: (list, str) -> list + """Context manager to un-parent provided nodes and return them back.""" + + def _as_mdagpath(node): + """Return MDagPath for node path.""" + if not node: + return + sel = OpenMaya.MSelectionList() + sel.add(node) + return sel.getDagPath(0) + + # We can only parent dag nodes so we ensure input contains only dag nodes + nodes = cmds.ls(nodes, type="dagNode", long=True) + if not nodes: + # opt-out early + yield + return + + parent_node_path = None + delete_parent = False + if parent: + if not cmds.objExists(parent): + parent_node = cmds.createNode("transform", + name=parent, + skipSelect=False) + delete_parent = True + else: + parent_node = parent + parent_node_path = cmds.ls(parent_node, long=True)[0] + + # Store original parents + node_parents = [] + for node in nodes: + node_parent = get_node_parent(node) + node_parents.append((_as_mdagpath(node), _as_mdagpath(node_parent))) + + try: + for node, node_parent in node_parents: + node_parent_path = node_parent.fullPathName() if node_parent else None # noqa + if node_parent_path == parent_node_path: + # Already a child + continue + + if parent_node_path: + cmds.parent(node.fullPathName(), parent_node_path) + else: + cmds.parent(node.fullPathName(), world=True) + + yield + finally: + # Reparent to original parents + for node, original_parent in node_parents: + node_path = node.fullPathName() + if not node_path: + # Node must have been deleted + continue + + node_parent_path = get_node_parent(node_path) + + original_parent_path = None + if original_parent: + original_parent_path = original_parent.fullPathName() + if not original_parent_path: + # Original parent node must have been deleted + continue + + if node_parent_path != original_parent_path: + if not original_parent_path: + cmds.parent(node_path, world=True) + else: + cmds.parent(node_path, original_parent_path) + + if delete_parent: + cmds.delete(parent_node_path) + + +@contextlib.contextmanager +def maintained_time(): + ct = cmds.currentTime(query=True) + try: + yield + finally: + cmds.currentTime(ct, edit=True) + + +def iter_visible_nodes_in_range(nodes, start, end): + """Yield nodes that are visible in start-end frame range. + + - Ignores intermediateObjects completely. + - Considers animated visibility attributes + upstream visibilities. + + This is optimized for large scenes where some nodes in the parent + hierarchy might have some input connections to the visibilities, + e.g. key, driven keys, connections to other attributes, etc. 
+ + This only does a single time step to `start` if current frame is + not inside frame range since the assumption is made that changing + a frame isn't so slow that it beats querying all visibility + plugs through MDGContext on another frame. + + Args: + nodes (list): List of node names to consider. + start (int, float): Start frame. + end (int, float): End frame. + + Returns: + list: List of node names. These will be long full path names so + might have a longer name than the input nodes. + + """ + # States we consider per node + VISIBLE = 1 # always visible + INVISIBLE = 0 # always invisible + ANIMATED = -1 # animated visibility + + # Ensure integers + start = int(start) + end = int(end) + + # Consider only non-intermediate dag nodes and use the "long" names. + nodes = cmds.ls(nodes, long=True, noIntermediate=True, type="dagNode") + if not nodes: + return + + with maintained_time(): + # Go to first frame of the range if the current time is outside + # the queried range so can directly query all visible nodes on + # that frame. + current_time = cmds.currentTime(query=True) + if not (start <= current_time <= end): + cmds.currentTime(start) + + visible = cmds.ls(nodes, long=True, visible=True) + for node in visible: + yield node + if len(visible) == len(nodes) or start == end: + # All are visible on frame one, so they are at least visible once + # inside the frame range. + return + + # For the invisible ones check whether its visibility and/or + # any of its parents visibility attributes are animated. If so, it might + # get visible on other frames in the range. + def memodict(f): + """Memoization decorator for a function taking a single argument. + + See: http://code.activestate.com/recipes/ + 578231-probably-the-fastest-memoization-decorator-in-the-/ + """ + + class memodict(dict): + def __missing__(self, key): + ret = self[key] = f(key) + return ret + + return memodict().__getitem__ + + @memodict + def get_state(node): + plug = node + ".visibility" + connections = cmds.listConnections(plug, + source=True, + destination=False) + if connections: + return ANIMATED + else: + return VISIBLE if cmds.getAttr(plug) else INVISIBLE + + visible = set(visible) + invisible = [node for node in nodes if node not in visible] + always_invisible = set() + # Iterate over the nodes by short to long names to iterate the highest + # in hierarchy nodes first. So the collected data can be used from the + # cache for parent queries in next iterations. + node_dependencies = dict() + for node in sorted(invisible, key=len): + + state = get_state(node) + if state == INVISIBLE: + always_invisible.add(node) + continue + + # If not always invisible by itself we should go through and check + # the parents to see if any of them are always invisible. For those + # that are "ANIMATED" we consider that this node is dependent on + # that attribute, we store them as dependency. 
+        dependencies = set()
+        if state == ANIMATED:
+            dependencies.add(node)
+
+        traversed_parents = list()
+        for parent in iter_parents(node):
+
+            if parent in always_invisible or get_state(parent) == INVISIBLE:
+                # When parent is always invisible then consider this parent,
+                # this node we started from and any of the parents we
+                # have traversed in-between to be *always invisible*
+                always_invisible.add(parent)
+                always_invisible.add(node)
+                always_invisible.update(traversed_parents)
+                break
+
+            # If we have traversed the parent before and its visibility
+            # was dependent on animated visibilities then we can just extend
+            # its dependencies with those for this node and break further
+            # iteration upwards.
+            parent_dependencies = node_dependencies.get(parent, None)
+            if parent_dependencies is not None:
+                dependencies.update(parent_dependencies)
+                break
+
+            state = get_state(parent)
+            if state == ANIMATED:
+                dependencies.add(parent)
+
+            traversed_parents.append(parent)
+
+        if node not in always_invisible and dependencies:
+            node_dependencies[node] = dependencies
+
+    if not node_dependencies:
+        return
+
+    # Now we only have to check the visibilities for nodes that have animated
+    # visibility dependencies upstream. The fastest way to check these
+    # visibility attributes across different frames is with Python api 2.0
+    # so we do that.
+    @memodict
+    def get_visibility_mplug(node):
+        """Return api 2.0 MPlug with cached memoize decorator"""
+        sel = OpenMaya.MSelectionList()
+        sel.add(node)
+        dag = sel.getDagPath(0)
+        return OpenMaya.MFnDagNode(dag).findPlug("visibility", True)
+
+    @contextlib.contextmanager
+    def dgcontext(mtime):
+        """MDGContext context manager"""
+        context = OpenMaya.MDGContext(mtime)
+        try:
+            previous = context.makeCurrent()
+            yield context
+        finally:
+            previous.makeCurrent()
+
+    # We skip the first frame as we already used that frame to check for
+    # overall visibilities. And end+1 to include the end frame.
+    scene_units = OpenMaya.MTime.uiUnit()
+    for frame in range(start + 1, end + 1):
+        mtime = OpenMaya.MTime(frame, unit=scene_units)
+
+        # Build a little cache so we don't query the same MPlug's value
+        # again if it was checked on this frame and also is a dependency
+        # for another node
+        frame_visibilities = {}
+        with dgcontext(mtime) as context:
+            for node, dependencies in list(node_dependencies.items()):
+                for dependency in dependencies:
+                    dependency_visible = frame_visibilities.get(dependency,
+                                                                None)
+                    if dependency_visible is None:
+                        mplug = get_visibility_mplug(dependency)
+                        dependency_visible = mplug.asBool(context)
+                        frame_visibilities[dependency] = dependency_visible
+
+                    if not dependency_visible:
+                        # One dependency is not visible, thus the
+                        # node is not visible.
+                        break
+
+                else:
+                    # All dependencies are visible.
+                    yield node
+                    # Remove node with dependencies for next frame iterations
+                    # because it was visible at least once.
+                    node_dependencies.pop(node)
+
+        # If there are no more nodes to process, break the frame iterations.
+        if not node_dependencies:
+            break
+
+
+def get_attribute_input(attr):
+    connections = cmds.listConnections(attr, plugs=True, destination=False)
+    return connections[0] if connections else None
+
+
+def convert_to_maya_fps(fps):
+    """Convert any fps to supported Maya framerates."""
+    float_framerates = [
+        23.976023976023978,
+        # WTF is 29.97 df vs fps?
+        29.97002997002997,
+        47.952047952047955,
+        59.94005994005994
+    ]
+    # 44100 fps evaluates as 41000.0. Why? Omitting for now.
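+    # Whole-number rates Maya supports; anything else must match one of the
+    # float rates above within a small tolerance (see the closest-match
+    # logic below).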
+    int_framerates = [
+        2,
+        3,
+        4,
+        5,
+        6,
+        8,
+        10,
+        12,
+        15,
+        16,
+        20,
+        24,
+        25,
+        30,
+        40,
+        48,
+        50,
+        60,
+        75,
+        80,
+        90,
+        100,
+        120,
+        125,
+        150,
+        200,
+        240,
+        250,
+        300,
+        375,
+        400,
+        500,
+        600,
+        750,
+        1200,
+        1500,
+        2000,
+        3000,
+        6000,
+        48000
+    ]
+
+    # If input fps is a whole number we'll return it as integer.
+    if float(fps).is_integer():
+        # Validate fps is part of Maya's fps selection.
+        if int(fps) not in int_framerates:
+            raise ValueError(
+                "Framerate \"{}\" is not supported in Maya".format(fps)
+            )
+        return int(fps)
+    else:
+        # Differences to supported float frame rates.
+        differences = []
+        for i in float_framerates:
+            differences.append(abs(i - fps))
+
+        # Validate difference does not stray too far from supported framerates.
+        min_difference = min(differences)
+        min_index = differences.index(min_difference)
+        supported_framerate = float_framerates[min_index]
+        if min_difference > 0.1:
+            raise ValueError(
+                "Framerate \"{}\" strays too far from any supported framerate"
+                " in Maya. Closest supported framerate is \"{}\"".format(
+                    fps, supported_framerate
+                )
+            )
+
+        return supported_framerate
+
+
+def write_xgen_file(data, filepath):
+    """Overwrite data in .xgen files.
+
+    Quite naive approach to mainly overwrite "xgDataPath" and "xgProjectPath".
+
+    Args:
+        data (dict): Dictionary of key, value. Key matches with xgen file.
+            For example:
+            {"xgDataPath": "some/path"}
+        filepath (string): Absolute path of .xgen file.
+    """
+    # Generate regex lookup for line to key; basically
+    # match any of the keys in `\t{key}\t\t`
+    keys = "|".join(re.escape(key) for key in data.keys())
+    re_keys = re.compile("^\t({})\t\t".format(keys))
+
+    lines = []
+    with open(filepath, "r") as f:
+        for line in f:
+            match = re_keys.match(line)
+            if match:
+                key = match.group(1)
+                value = data[key]
+                line = "\t{}\t\t{}\n".format(key, value)
+
+            lines.append(line)
+
+    with open(filepath, "w") as f:
+        f.writelines(lines)
+
+
+def get_color_management_preferences():
+    """Get and resolve OCIO preferences."""
+    data = {
+        # Is color management enabled.
+        "enabled": cmds.colorManagementPrefs(
+            query=True, cmEnabled=True
+        ),
+        "rendering_space": cmds.colorManagementPrefs(
+            query=True, renderingSpaceName=True
+        ),
+        "output_transform": cmds.colorManagementPrefs(
+            query=True, outputTransformName=True
+        ),
+        "output_transform_enabled": cmds.colorManagementPrefs(
+            query=True, outputTransformEnabled=True
+        ),
+        "view_transform": cmds.colorManagementPrefs(
+            query=True, viewTransformName=True
+        )
+    }
+
+    # Split view and display from view_transform. view_transform comes in
+    # format of "{view} ({display})".
+    regex = re.compile(r"^(?P<view>.+) \((?P<display>.+)\)$")
+    if int(cmds.about(version=True)) <= 2020:
+        # view_transform comes in format of "{view} {display}" in 2020.
+        regex = re.compile(r"^(?P<view>.+) (?P<display>.+)$")
+
+    match = regex.match(data["view_transform"])
+    if not match:
+        raise ValueError(
+            "Unable to parse view and display from Maya view transform: '{}' "
+            "using regex '{}'".format(data["view_transform"], regex.pattern)
+        )
+
+    data.update({
+        "display": match.group("display"),
+        "view": match.group("view")
+    })
+
+    # Get config absolute path.
+    path = cmds.colorManagementPrefs(
+        query=True, configFilePath=True
+    )
+
+    # The OCIO config path supports a custom <MAYA_RESOURCES> token.
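+    # For example (illustrative): a config path stored as
+    # "<MAYA_RESOURCES>/OCIO-configs/Maya2022-default/config.ocio"
+    # resolves to an absolute path inside Maya's installed resources.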
+    maya_resources_token = "<MAYA_RESOURCES>"
+    maya_resources_path = OpenMaya.MGlobal.getAbsolutePathToResources()
+    path = path.replace(maya_resources_token, maya_resources_path)
+
+    data["config"] = path
+
+    return data
+
+
+def get_color_management_output_transform():
+    preferences = get_color_management_preferences()
+    colorspace = preferences["rendering_space"]
+    if preferences["output_transform_enabled"]:
+        colorspace = preferences["output_transform"]
+    return colorspace
+
+
+def image_info(file_path):
+    # type: (str) -> dict
+    """Based on the texture path, get its bit depth and format information.
+
+    Takes reference from makeTx.py in Arnold:
+        ImageInfo(filename): Get Image Information for colorspace
+        AiTextureGetFormat(filename): Get Texture Format
+        AiTextureGetBitDepth(filename): Get Texture bit depth
+
+    Args:
+        file_path (str): Path to the texture file.
+
+    Returns:
+        dict: Dictionary with the information about the texture file.
+    """
+    from arnold import (
+        AiTextureGetBitDepth,
+        AiTextureGetFormat
+    )
+    # Get Texture Information
+    img_info = {'filename': file_path}
+    if os.path.isfile(file_path):
+        img_info['bit_depth'] = AiTextureGetBitDepth(file_path)  # noqa
+        img_info['format'] = AiTextureGetFormat(file_path)  # noqa
+    else:
+        img_info['bit_depth'] = 8
+        img_info['format'] = "unknown"
+    return img_info
+
+
+def guess_colorspace(img_info):
+    # type: (dict) -> str
+    """Guess the colorspace of the input image filename.
+
+    Note:
+        Reference from makeTx.py
+
+    Args:
+        img_info (dict): Image info generated by :func:`image_info`
+
+    Returns:
+        str: color space name used in the `--colorconvert`
+            option of maketx.
+    """
+    from arnold import (
+        AiTextureInvalidate,
+        # types
+        AI_TYPE_BYTE,
+        AI_TYPE_INT,
+        AI_TYPE_UINT
+    )
+    try:
+        if img_info['bit_depth'] <= 16:
+            if img_info['format'] in (AI_TYPE_BYTE, AI_TYPE_INT, AI_TYPE_UINT):  # noqa
+                return 'sRGB'
+            else:
+                return 'linear'
+        # now discard the image file as AiTextureGetFormat has loaded it
+        AiTextureInvalidate(img_info['filename'])  # noqa
+    except ValueError:
+        print(("[maketx] Error: Could not guess "
+               "colorspace for {}").format(img_info["filename"]))
+        return "linear"
+
+
+def len_flattened(components):
+    """Return the length of the list as if it was flattened.
+
+    Maya will return consecutive components as a single entry
+    when requesting with `maya.cmds.ls` without the `flatten`
+    flag. Though enabling `flatten` on a large list (e.g. millions)
+    will result in a slow result. This command will return the amount
+    of entries in a non-flattened list by parsing the result with
+    regex.
+
+    Args:
+        components (list): The non-flattened components.
+
+    Returns:
+        int: The amount of entries.
+
+    """
+    assert isinstance(components, (list, tuple))
+    n = 0
+
+    pattern = re.compile(r"\[(\d+):(\d+)\]")
+    for c in components:
+        match = pattern.search(c)
+        if match:
+            start, end = match.groups()
+            n += int(end) - int(start) + 1
+        else:
+            n += 1
+    return n
+
+
+def get_all_children(nodes, ignore_intermediate_objects=False):
+    """Return all children of `nodes` including each instanced child.
+
+    Using maya.cmds.listRelatives(allDescendents=True) includes only the first
+    instance. As such, this function acts as an optimal replacement with a
+    focus on a fast query.
+
+    Args:
+        nodes (iterable): List of nodes to get children for.
+        ignore_intermediate_objects (bool): Ignore any children that
+            are intermediate objects.
+
+    Returns:
+        set: Children of input nodes.
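+
+    Example:
+        A minimal usage sketch; the node name is illustrative only::
+
+            children = get_all_children(
+                ["|group1"], ignore_intermediate_objects=True
+            )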
+ + """ + + sel = OpenMaya.MSelectionList() + traversed = set() + iterator = OpenMaya.MItDag(OpenMaya.MItDag.kDepthFirst) + fn_dag = OpenMaya.MFnDagNode() + for node in nodes: + + if node in traversed: + # Ignore if already processed as a child + # before + continue + + sel.clear() + sel.add(node) + dag = sel.getDagPath(0) + + iterator.reset(dag) + # ignore self + iterator.next() # noqa: B305 + while not iterator.isDone(): + + if ignore_intermediate_objects: + fn_dag.setObject(iterator.currentItem()) + if fn_dag.isIntermediateObject: + iterator.prune() + iterator.next() # noqa: B305 + continue + + path = iterator.fullPathName() + + if path in traversed: + iterator.prune() + iterator.next() # noqa: B305 + continue + + traversed.add(path) + iterator.next() # noqa: B305 + + return traversed + + +def get_capture_preset( + task_name, task_type, product_name, project_settings, log +): + """Get capture preset for playblasting. + + Logic for transitioning from old style capture preset to new capture preset + profiles. + + Args: + task_name (str): Task name. + task_type (str): Task type. + product_name (str): Product name. + project_settings (dict): Project settings. + log (logging.Logger): Logging object. + """ + capture_preset = None + filtering_criteria = { + "task_names": task_name, + "task_types": task_type, + "product_names": product_name + } + + plugin_settings = project_settings["maya"]["publish"]["ExtractPlayblast"] + if plugin_settings["profiles"]: + profile = filter_profiles( + plugin_settings["profiles"], + filtering_criteria, + logger=log + ) + capture_preset = profile.get("capture_preset") + else: + log.warning("No profiles present for Extract Playblast") + + # Backward compatibility for deprecated Extract Playblast settings + # without profiles. + if capture_preset is None: + log.debug( + "Falling back to deprecated Extract Playblast capture preset " + "because no new style playblast profiles are defined." + ) + capture_preset = plugin_settings.get("capture_preset") + + if capture_preset: + # Create deepcopy of preset as we'll change the values + capture_preset = copy.deepcopy(capture_preset) + + viewport_options = capture_preset["ViewportOptions"] + # Change 'list' to 'dict' for 'capture.py' + viewport_options["pluginObjects"] = { + item["name"]: item["value"] + for item in viewport_options["pluginObjects"] + } + return capture_preset or {} + + +def get_reference_node(members, log=None): + """Get the reference node from the container members + Args: + members: list of node names + + Returns: + str: Reference node name. + + """ + + # Collect the references without .placeHolderList[] attributes as + # unique entries (objects only) and skipping the sharedReferenceNode. 
+ references = set() + for ref in cmds.ls(members, exactType="reference", objectsOnly=True): + + # Ignore any `:sharedReferenceNode` + if ref.rsplit(":", 1)[-1].startswith("sharedReferenceNode"): + continue + + # Ignore _UNKNOWN_REF_NODE_ (PLN-160) + if ref.rsplit(":", 1)[-1].startswith("_UNKNOWN_REF_NODE_"): + continue + + if not is_valid_reference_node(ref): + continue + + references.add(ref) + + assert references, "No reference node found in container" + + # Get highest reference node (least parents) + highest = min(references, + key=lambda x: len(get_reference_node_parents(x))) + + # Warn the user when we're taking the highest reference node + if len(references) > 1: + if not log: + log = logging.getLogger(__name__) + + log.warning("More than one reference node found in " + "container, using highest reference node: " + "%s (in: %s)", highest, list(references)) + + return highest + + +def get_reference_node_parents(ref): + """Return all parent reference nodes of reference node + + Args: + ref (str): reference node. + + Returns: + list: The upstream parent reference nodes. + + """ + def _get_parent(reference_node): + """Return parent reference node, but ignore invalid reference nodes""" + if not is_valid_reference_node(reference_node): + return + return cmds.referenceQuery(reference_node, + referenceNode=True, + parent=True) + + parent = _get_parent(ref) + parents = [] + while parent: + parents.append(parent) + parent = _get_parent(parent) + return parents + + +def create_rig_animation_instance( + nodes, context, namespace, options=None, log=None +): + """Create an animation publish instance for loaded rigs. + + See the RecreateRigAnimationInstance inventory action on how to use this + for loaded rig containers. + + Arguments: + nodes (list): Member nodes of the rig instance. + context (dict): Representation context of the rig container + namespace (str): Namespace of the rig container + options (dict, optional): Additional loader data + log (logging.Logger, optional): Logger to log to if provided + + Returns: + None + + """ + if options is None: + options = {} + name = context["representation"]["name"] + output = next((node for node in nodes if + node.endswith("out_SET")), None) + controls = next((node for node in nodes if + node.endswith("controls_SET")), None) + if name != "fbx": + assert output, "No out_SET in rig, this is a bug." + assert controls, "No controls_SET in rig, this is a bug." + + anim_skeleton = next((node for node in nodes if + node.endswith("skeletonAnim_SET")), None) + skeleton_mesh = next((node for node in nodes if + node.endswith("skeletonMesh_SET")), None) + + # Find the roots amongst the loaded nodes + roots = ( + cmds.ls(nodes, assemblies=True, long=True) or + get_highest_in_hierarchy(nodes) + ) + assert roots, "No root nodes in rig, this is a bug." 
+ + folder_entity = context["folder"] + product_entity = context["product"] + product_type = product_entity["productType"] + product_name = product_entity["name"] + + custom_product_name = options.get("animationProductName") + if custom_product_name: + formatting_data = { + "folder": { + "name": folder_entity["name"] + }, + "product": { + "type": product_type, + "name": product_name, + }, + "asset": folder_entity["name"], + "subset": product_name, + "family": product_type + } + namespace = get_custom_namespace( + custom_product_name.format(**formatting_data) + ) + + if log: + log.info("Creating product: {}".format(namespace)) + + # Fill creator identifier + creator_identifier = "io.openpype.creators.maya.animation" + + host = registered_host() + create_context = CreateContext(host) + # Create the animation instance + rig_sets = [output, controls, anim_skeleton, skeleton_mesh] + # Remove sets that this particular rig does not have + rig_sets = [s for s in rig_sets if s is not None] + with maintained_selection(): + cmds.select(rig_sets + roots, noExpand=True) + create_context.create( + creator_identifier=creator_identifier, + variant=namespace, + pre_create_data={"use_selection": True} + ) + + +def get_node_index_under_parent(node: str) -> int: + """Return the index of a DAG node under its parent. + + Arguments: + node (str): A DAG Node path. + + Returns: + int: The DAG node's index under its parents or world + + """ + node = cmds.ls(node, long=True)[0] # enforce long names + parent = node.rsplit("|", 1)[0] + if not parent: + return cmds.ls(assemblies=True, long=True).index(node) + else: + return cmds.listRelatives(parent, + children=True, + fullPath=True).index(node) diff --git a/client/ayon_maya/api/lib_renderproducts.py b/client/ayon_maya/api/lib_renderproducts.py new file mode 100644 index 00000000..52c282c6 --- /dev/null +++ b/client/ayon_maya/api/lib_renderproducts.py @@ -0,0 +1,1469 @@ +# -*- coding: utf-8 -*- +"""Module handling expected render output from Maya. + +This module is used in :mod:`collect_render` and :mod:`collect_vray_scene`. + +Note: + To implement new renderer, just create new class inheriting from + :class:`ARenderProducts` and add it to :func:`RenderProducts.get()`. + +Attributes: + R_SINGLE_FRAME (:class:`re.Pattern`): Find single frame number. + R_FRAME_RANGE (:class:`re.Pattern`): Find frame range. + R_FRAME_NUMBER (:class:`re.Pattern`): Find frame number in string. + R_LAYER_TOKEN (:class:`re.Pattern`): Find layer token in image prefixes. + R_AOV_TOKEN (:class:`re.Pattern`): Find AOV token in image prefixes. + R_SUBSTITUTE_AOV_TOKEN (:class:`re.Pattern`): Find and substitute AOV token + in image prefixes. + R_REMOVE_AOV_TOKEN (:class:`re.Pattern`): Find and remove AOV token in + image prefixes. + R_CLEAN_FRAME_TOKEN (:class:`re.Pattern`): Find and remove unfilled + Renderman frame token in image prefix. + R_CLEAN_EXT_TOKEN (:class:`re.Pattern`): Find and remove unfilled Renderman + extension token in image prefix. + R_SUBSTITUTE_LAYER_TOKEN (:class:`re.Pattern`): Find and substitute render + layer token in image prefixes. + R_SUBSTITUTE_SCENE_TOKEN (:class:`re.Pattern`): Find and substitute scene + token in image prefixes. + R_SUBSTITUTE_CAMERA_TOKEN (:class:`re.Pattern`): Find and substitute camera + token in image prefixes. + IMAGE_PREFIXES (dict): Mapping between renderers and their respective + image prefix attribute names. + +Thanks: + Roy Nieterau (BigRoy) / Colorbleed for overhaul of original + *expected_files*. 
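+
+Example:
+    A minimal usage sketch; the render layer name is illustrative only::
+
+        from ayon_maya.api import lib_renderproducts
+        render_products = lib_renderproducts.get("rs_main")
+        for product in render_products.layer_data.products:
+            print(product.productName, product.ext)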
+
+"""
+
+import logging
+import re
+import os
+from abc import ABCMeta, abstractmethod
+
+import six
+import attr
+
+from . import lib
+from . import lib_rendersetup
+from ayon_core.pipeline.colorspace import get_ocio_config_views
+
+from maya import cmds, mel
+
+log = logging.getLogger(__name__)
+
+R_SINGLE_FRAME = re.compile(r"^(-?)\d+$")
+R_FRAME_RANGE = re.compile(r"^(?P<sf>(-?)\d+)-(?P<ef>(-?)\d+)$")
+R_FRAME_NUMBER = re.compile(r".+\.(?P<frame>[0-9]+)\..+")
+R_LAYER_TOKEN = re.compile(
+    r".*((?:%l)|(?:<layer>)|(?:<renderlayer>)).*", re.IGNORECASE
+)
+R_AOV_TOKEN = re.compile(r".*%a.*|.*<aov>.*|.*<renderpass>.*", re.IGNORECASE)
+R_SUBSTITUTE_AOV_TOKEN = re.compile(r"%a|<aov>|<renderpass>", re.IGNORECASE)
+R_REMOVE_AOV_TOKEN = re.compile(
+    r"_%a|\.%a|_<aov>|\.<aov>|_<renderpass>|\.<renderpass>", re.IGNORECASE)
+# to remove unused renderman tokens
+R_CLEAN_FRAME_TOKEN = re.compile(r"\.?<f\d+>\.?", re.IGNORECASE)
+R_CLEAN_EXT_TOKEN = re.compile(r"\.?<ext>\.?", re.IGNORECASE)
+
+R_SUBSTITUTE_LAYER_TOKEN = re.compile(
+    r"%l|<layer>|<renderlayer>", re.IGNORECASE
+)
+R_SUBSTITUTE_CAMERA_TOKEN = re.compile(r"%c|<camera>", re.IGNORECASE)
+R_SUBSTITUTE_SCENE_TOKEN = re.compile(r"%s|<scene>", re.IGNORECASE)
+
+# not sure about the renderman image prefix
+IMAGE_PREFIXES = {
+    "vray": "vraySettings.fileNamePrefix",
+    "arnold": "defaultRenderGlobals.imageFilePrefix",
+    "renderman": "rmanGlobals.imageFileFormat",
+    "redshift": "defaultRenderGlobals.imageFilePrefix",
+    "mayahardware2": "defaultRenderGlobals.imageFilePrefix"
+}
+
+RENDERMAN_IMAGE_DIR = "<scene>/<layer>"
+
+
+def has_tokens(string, tokens):
+    """Return whether any of tokens is in input string (case-insensitive)"""
+    pattern = "({})".format("|".join(re.escape(token) for token in tokens))
+    match = re.search(pattern, string, re.IGNORECASE)
+    return bool(match)
+
+
+@attr.s
+class LayerMetadata(object):
+    """Data class for Render Layer metadata."""
+    frameStart = attr.ib()
+    frameEnd = attr.ib()
+    cameras = attr.ib()
+    sceneName = attr.ib()
+    layerName = attr.ib()
+    renderer = attr.ib()
+    defaultExt = attr.ib()
+    filePrefix = attr.ib()
+    frameStep = attr.ib(default=1)
+    padding = attr.ib(default=4)
+
+    # Render Products
+    products = attr.ib(init=False, default=attr.Factory(list))
+
+    # The AOV separator token. Note that not all renderers define an explicit
+    # render separator but allow to put the AOV/RenderPass token anywhere in
+    # the file path prefix. For those renderers we'll fall back to whatever
+    # is between the last occurrences of <RenderLayer> and <RenderPass>
+    # tokens.
+    aov_separator = attr.ib(default="_")
+
+
+@attr.s
+class RenderProduct(object):
+    """Describes an image or other file-like artifact produced by a render.
+
+    Warning:
+        This currently does NOT return a product PER render camera.
+        A single Render Product will generate files per camera. E.g. with two
+        cameras each render product generates two sequences on disk assuming
+        the file path prefix correctly uses the <Camera> token.
+
+    """
+    productName = attr.ib()
+    ext = attr.ib()  # extension
+    colorspace = attr.ib()  # colorspace
+    aov = attr.ib(default=None)  # source aov
+    driver = attr.ib(default=None)  # source driver
+    multipart = attr.ib(default=False)  # multichannel file
+    camera = attr.ib(default=None)  # used only when rendering
+    #                                 from multiple cameras
+
+
+def get(layer, render_instance=None):
+    # type: (str, object) -> ARenderProducts
+    """Get render details and products for given renderer and render layer.
+
+    Args:
+        layer (str): Name of render layer
+        render_instance (pyblish.api.Instance): Publish instance.
+            If not provided an empty mock instance is used.
+
+    Returns:
+        ARenderProducts: The correct RenderProducts instance for that
+            renderlayer.
+
+    Raises:
+        :exc:`UnsupportedRendererException`: If requested renderer
+            is not supported. It needs to be implemented by extending
+            :class:`ARenderProducts` and added to this method's ``if``
+            statement.
+
+    """
+
+    if render_instance is None:
+        # For now produce a mock instance
+        class Instance(object):
+            data = {}
+        render_instance = Instance()
+
+    renderer_name = lib.get_attr_in_layer(
+        "defaultRenderGlobals.currentRenderer",
+        layer=layer
+    )
+
+    renderer = {
+        "arnold": RenderProductsArnold,
+        "vray": RenderProductsVray,
+        "redshift": RenderProductsRedshift,
+        "renderman": RenderProductsRenderman,
+        "mayahardware2": RenderProductsMayaHardware
+    }.get(renderer_name.lower(), None)
+    if renderer is None:
+        raise UnsupportedRendererException(
+            "Unsupported renderer: {}".format(renderer_name)
+        )
+
+    return renderer(layer, render_instance)
+
+
+@six.add_metaclass(ABCMeta)
+class ARenderProducts:
+    """Abstract class with common code for all renderers.
+
+    Attributes:
+        renderer (str): name of renderer.
+
+    """
+
+    renderer = None
+
+    def __init__(self, layer, render_instance):
+        """Constructor."""
+        self.layer = layer
+        self.render_instance = render_instance
+        self.multipart = self.get_multipart()
+
+        # Initialize
+        self.layer_data = self._get_layer_data()
+        self.layer_data.products = self.get_render_products()
+
+    def get_multipart(self):
+        raise NotImplementedError(
+            "The render product implementation does not have a "
+            "\"get_multipart\" method."
+        )
+
+    def has_camera_token(self):
+        # type: () -> bool
+        """Check if camera token is in image prefix.
+
+        Returns:
+            bool: True/False if camera token is present.
+
+        """
+        return "<camera>" in self.layer_data.filePrefix.lower()
+
+    @abstractmethod
+    def get_render_products(self):
+        """To be implemented by renderer class.
+
+        This should return a list of RenderProducts.
+
+        Returns:
+            list: List of RenderProduct
+
+        """
+
+    @staticmethod
+    def sanitize_camera_name(camera):
+        # type: (str) -> str
+        """Sanitize camera name.
+
+        Remove Maya illegal characters from camera name.
+
+        Args:
+            camera (str): Maya camera name.
+
+        Returns:
+            (str): sanitized camera name
+
+        Example:
+            >>> ARenderProducts.sanitize_camera_name('test:camera_01')
+            'test_camera_01'
+
+        """
+        return re.sub('[^0-9a-zA-Z_]+', '_', camera)
+
+    def get_renderer_prefix(self):
+        # type: () -> str
+        """Return prefix for specific renderer.
+
+        This is for most renderers the same and can be overridden if needed.
+
+        Returns:
+            str: String with image prefix containing tokens
+
+        Raises:
+            :exc:`UnsupportedRendererException`: If we requested image
+                prefix for renderer we know nothing about.
+                See :data:`IMAGE_PREFIXES` for mapping of renderers and
+                image prefixes.
+
+        """
+        try:
+            prefix_attr = IMAGE_PREFIXES[self.renderer]
+        except KeyError:
+            raise UnsupportedRendererException(
+                "Unsupported renderer {}".format(self.renderer)
+            )
+
+        # Note: When this attribute is never set (e.g. on maya launch) then
+        # this can return None even though it is a string attribute
+        prefix = self._get_attr(prefix_attr)
+
+        if not prefix:
+            # Fall back to scene name by default
+            log.warning("Image prefix not set, using <Scene>")
+            prefix = "<Scene>"
+
+        return prefix
+
+    def get_render_attribute(self, attribute):
+        """Get attribute from render options.
+
+        Args:
+            attribute (str): name of attribute to be looked up.
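+
+        Example:
+            An illustrative sketch; queries the layer's start frame::
+
+                start = self.get_render_attribute("startFrame")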
+
+        Returns:
+            Attribute value
+
+        """
+        return self._get_attr("defaultRenderGlobals", attribute)
+
+    def _get_attr(self, node_attr, attribute=None, as_string=True):
+        """Return the value of the attribute in the renderlayer
+
+        For readability this allows passing in the attribute in two ways.
+
+        As a single argument:
+            _get_attr("node.attr")
+        Or as two arguments:
+            _get_attr("node", "attr")
+
+        Returns:
+            Value of the attribute inside the layer this instance is set to.
+
+        """
+
+        if attribute is None:
+            plug = node_attr
+        else:
+            plug = "{}.{}".format(node_attr, attribute)
+
+        return lib.get_attr_in_layer(plug, layer=self.layer,
+                                     as_string=as_string)
+
+    @staticmethod
+    def extract_separator(file_prefix):
+        """Extract AOV separator character from the prefix.
+
+        Default behavior extracts the part between
+        last occurrences of <RenderLayer> and <RenderPass>
+
+        Todo:
+            This code also triggers for V-Ray which overrides it explicitly
+            so this code will invalidly debug log it couldn't extract the
+            AOV separator even though it does set it in RenderProductsVray.
+
+        Args:
+            file_prefix (str): File prefix with tokens.
+
+        Returns:
+            str or None: prefix character if it can be extracted.
+        """
+        layer_tokens = ["<renderlayer>", "<layer>"]
+        aov_tokens = ["<aov>", "<renderpass>"]
+
+        def match_last(tokens, text):
+            """regex match the last occurrence from a list of tokens"""
+            pattern = "(?:.*)({})".format("|".join(tokens))
+            return re.search(pattern, text, re.IGNORECASE)
+
+        layer_match = match_last(layer_tokens, file_prefix)
+        aov_match = match_last(aov_tokens, file_prefix)
+        separator = None
+        if layer_match and aov_match:
+            matches = sorted((layer_match, aov_match),
+                             key=lambda match: match.end(1))
+            separator = file_prefix[matches[0].end(1):matches[1].start(1)]
+        return separator
+
+    def _get_layer_data(self):
+        # type: () -> LayerMetadata
+        #                      ______________________________________________
+        # ____________________/ ____________________________________________/
+        # 1 -  get scene name  /__________________/
+        # ____________________/
+        _, scene_basename = os.path.split(cmds.file(q=True, loc=True))
+        scene_name, _ = os.path.splitext(scene_basename)
+        kwargs = {}
+        file_prefix = self.get_renderer_prefix()
+
+        # If the Render Layer belongs to a Render Setup layer then the
+        # output name is based on the Render Setup Layer name without
+        # the `rs_` prefix.
+        layer_name = self.layer
+        rs_layer = lib_rendersetup.get_rendersetup_layer(layer_name)
+        if rs_layer:
+            layer_name = rs_layer
+
+        if self.layer == "defaultRenderLayer":
+            # defaultRenderLayer renders as masterLayer
+            layer_name = "masterLayer"
+
+        separator = self.extract_separator(file_prefix)
+        if separator:
+            kwargs["aov_separator"] = separator
+        else:
+            log.debug("Couldn't extract aov separator from "
+                      "file prefix: {}".format(file_prefix))
+
+        # todo: Support Custom Frames sequences 0,5-10,100-120
+        #       Deadline allows submitting renders with a custom frame list
+        #       to support those cases we might want to allow 'custom frames'
+        #       to be overridden to `ExpectFiles` class?
+        return LayerMetadata(
+            frameStart=int(self.get_render_attribute("startFrame")),
+            frameEnd=int(self.get_render_attribute("endFrame")),
+            frameStep=int(self.get_render_attribute("byFrameStep")),
+            padding=int(self.get_render_attribute("extensionPadding")),
+            # if we have <camera> token in prefix path we'll expect output for
+            # every renderable camera in layer.
+            cameras=self.get_renderable_cameras(),
+            sceneName=scene_name,
+            layerName=layer_name,
+            renderer=self.renderer,
+            defaultExt=self._get_attr("defaultRenderGlobals.imfPluginKey"),
+            filePrefix=file_prefix,
+            **kwargs
+        )
+
+    def _generate_file_sequence(
+            self, layer_data,
+            force_aov_name=None,
+            force_ext=None,
+            force_cameras=None):
+        # type: (LayerMetadata, str, str, list) -> list
+        expected_files = []
+        cameras = force_cameras or layer_data.cameras
+        ext = force_ext or layer_data.defaultExt
+        for cam in cameras:
+            file_prefix = layer_data.filePrefix
+            mappings = (
+                (R_SUBSTITUTE_SCENE_TOKEN, layer_data.sceneName),
+                (R_SUBSTITUTE_LAYER_TOKEN, layer_data.layerName),
+                (R_SUBSTITUTE_CAMERA_TOKEN, self.sanitize_camera_name(cam)),
+                # this is required to remove unfilled aov token, for example
+                # in Redshift
+                (R_REMOVE_AOV_TOKEN, "") if not force_aov_name \
+                else (R_SUBSTITUTE_AOV_TOKEN, force_aov_name),
+
+                (R_CLEAN_FRAME_TOKEN, ""),
+                (R_CLEAN_EXT_TOKEN, ""),
+            )
+
+            for regex, value in mappings:
+                file_prefix = re.sub(regex, value, file_prefix)
+
+            for frame in range(
+                    int(layer_data.frameStart),
+                    int(layer_data.frameEnd) + 1,
+                    int(layer_data.frameStep),
+            ):
+                frame_str = str(frame).rjust(layer_data.padding, "0")
+                expected_files.append(
+                    "{}.{}.{}".format(file_prefix, frame_str, ext)
+                )
+        return expected_files
+
+    def get_files(self, product):
+        # type: (RenderProduct) -> list
+        """Return list of expected files.
+
+        It will translate render token strings ('<aov>', etc.) to
+        their values. This task is tricky as every renderer deals with this
+        differently. That's why we expose `get_files` as a method on the
+        Renderer class so it can be overridden for complex cases.
+
+        Args:
+            product (RenderProduct): Render product to be used for file
+                generation.
+
+        Returns:
+            List of files
+
+        """
+        return self._generate_file_sequence(
+            self.layer_data,
+            force_aov_name=product.productName,
+            force_ext=product.ext,
+            force_cameras=[product.camera]
+        )
+
+    def get_renderable_cameras(self):
+        # type: () -> list
+        """Get all renderable camera transforms.
+
+        Returns:
+            list: list of renderable cameras.
+
+        """
+
+        renderable_cameras = [
+            cam for cam in cmds.ls(cameras=True)
+            if self._get_attr(cam, "renderable")
+        ]
+
+        # The <Camera> token in the output produces a sanitized name for
+        # the camera using its shortest unique path of the transform so
+        # we'll return at least that unique path. This could include a
+        # parent name too when two cameras have the same name but are
+        # in a different hierarchy, e.g. "group1|cam" and "group2|cam"
+        def get_name(camera):
+            return cmds.ls(cmds.listRelatives(camera,
+                                              parent=True,
+                                              fullPath=True))[0]
+
+        return [get_name(cam) for cam in renderable_cameras]
+
+
+class RenderProductsArnold(ARenderProducts):
+    """Render products for Arnold renderer.
+
+    References:
+        mtoa.utils.getFileName()
+        mtoa.utils.ui.common.updateArnoldTargetFilePreview()
+
+    Notes:
+        - Output Denoising AOVs are not currently included.
+        - Only Frame/Animation ext: name.#.ext is supported.
+        - Use Custom extension is not supported.
+        - <RenderPassType> and <RenderPassFileGroup> tokens not tested
+        - With Merge AOVs but <RenderPass> in File Name Prefix Arnold
+          will still NOT merge the aovs. This class correctly resolves
+          it - but user should be aware.
+        - File Path Prefix overrides per AOV driver are not implemented
+
+    Attributes:
+        aiDriverExtension (dict): Arnold AOV driver extension mapping.
+            Is there a better way?
+        renderer (str): name of renderer.
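+
+    Example:
+        An illustrative sketch; the layer name and publish instance are
+        assumptions, not part of this module::
+
+            render_products = RenderProductsArnold("rs_main", instance)
+            for product in render_products.layer_data.products:
+                print(product.productName)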
+
+    """
+    renderer = "arnold"
+    aiDriverExtension = {
+        "jpeg": "jpg",
+        "exr": "exr",
+        "deepexr": "exr",
+        "png": "png",
+        "tiff": "tif",
+        "mtoa_shaders": "ass",  # TODO: research what those last two should be
+        "maya": "",
+    }
+
+    def get_renderer_prefix(self):
+
+        prefix = super(RenderProductsArnold, self).get_renderer_prefix()
+        merge_aovs = self._get_attr("defaultArnoldDriver.mergeAOVs")
+        if not merge_aovs and "<renderpass>" not in prefix.lower():
+            # When Merge AOVs is disabled and <RenderPass> token not present
+            # then Arnold prepends <RenderPass>/ to the output path.
+            # todo: It's untested what happens if AOV driver has an
+            #       an explicit override path prefix.
+            prefix = "<RenderPass>/" + prefix
+
+        return prefix
+
+    def get_multipart(self):
+        multipart = False
+        multilayer = bool(self._get_attr("defaultArnoldDriver.multipart"))
+        merge_AOVs = bool(self._get_attr("defaultArnoldDriver.mergeAOVs"))
+        if multilayer or merge_AOVs:
+            multipart = True
+
+        return multipart
+
+    def _get_aov_render_products(self, aov, cameras=None):
+        """Return all render products for the AOV"""
+
+        products = []
+        aov_name = self._get_attr(aov, "name")
+        ai_drivers = cmds.listConnections("{}.outputs".format(aov),
+                                          source=True,
+                                          destination=False,
+                                          type="aiAOVDriver") or []
+        if not cameras:
+            cameras = [
+                self.sanitize_camera_name(
+                    self.get_renderable_cameras()[0]
+                )
+            ]
+
+        for ai_driver in ai_drivers:
+            colorspace = self._get_colorspace(
+                ai_driver + ".colorManagement"
+            )
+            # todo: check aiAOVDriver.prefix as it could have
+            #       a custom path prefix set for this driver
+
+            # Skip Drivers set only for GUI
+            # 0: GUI, 1: Batch, 2: GUI and Batch
+            output_mode = self._get_attr(ai_driver, "outputMode")
+            if output_mode == 0:  # GUI only
+                log.warning("%s has Output Mode set to GUI, "
+                            "skipping...", ai_driver)
+                continue
+
+            ai_translator = self._get_attr(ai_driver, "aiTranslator")
+            try:
+                ext = self.aiDriverExtension[ai_translator]
+            except KeyError:
+                raise AOVError(
+                    "Unrecognized arnold driver format "
+                    "for AOV - {}".format(aov_name)
+                )
+
+            # If aov RGBA is selected, arnold will translate it to `beauty`
+            name = aov_name
+            if name == "RGBA":
+                name = "beauty"
+
+            # Support Arnold light groups for AOVs
+            # Global AOV: When disabled the main layer is
+            #             not written: `{pass}`
+            # All Light Groups: When enabled, a `{pass}_lgroups` file is
+            #                   written and is always merged into a
+            #                   single file
+            # Light Groups List: When set, a product per light
+            #                    group is written
+            #                    e.g. {pass}_front, {pass}_rim
+            global_aov = self._get_attr(aov, "globalAov")
+            if global_aov:
+                for camera in cameras:
+                    product = RenderProduct(
+                        productName=name,
+                        ext=ext,
+                        aov=aov_name,
+                        driver=ai_driver,
+                        multipart=self.multipart,
+                        camera=camera,
+                        colorspace=colorspace
+                    )
+                    products.append(product)
+
+            all_light_groups = self._get_attr(aov, "lightGroups")
+            if all_light_groups:
+                # All light groups is enabled. A single multipart
+
+    def _get_colorspace(self, attribute):
+        """Resolve colorspace from Arnold settings."""
+
+        def _view_transform():
+            preferences = lib.get_color_management_preferences()
+            views_data = get_ocio_config_views(preferences["config"])
+            view_data = views_data[
+                "{}/{}".format(preferences["display"], preferences["view"])
+            ]
+            return view_data["colorspace"]
+
+        def _raw():
+            preferences = lib.get_color_management_preferences()
+            return preferences["rendering_space"]
+
+        resolved_values = {
+            "Raw": _raw,
+            "Use View Transform": _view_transform,
+            # Default. Same as Maya Preferences.
+            "Use Output Transform": lib.get_color_management_output_transform
+        }
+        return resolved_values[self._get_attr(attribute)]()
+
+    def get_render_products(self):
+        """Get all AOVs.
+
+        See Also:
+            :func:`ARenderProducts.get_render_products()`
+
+        Raises:
+            :class:`AOVError`: If AOV cannot be determined.
+
+        """
+
+        if not cmds.ls("defaultArnoldRenderOptions", type="aiOptions"):
+            # this occurs when the Render Settings window was not opened yet.
+            # In such case there are no Arnold options created so the query
+            # for AOVs will fail. We terminate here as there are no AOVs
+            # specified then. This state will most probably fail later on
+            # some Validator anyway.
+            return []
+
+        # check if camera token is in prefix. If so, and we have list of
+        # renderable cameras, generate render product for each and every
+        # of them.
+        cameras = [
+            self.sanitize_camera_name(c)
+            for c in self.get_renderable_cameras()
+        ]
+
+        default_ext = self._get_attr("defaultRenderGlobals.imfPluginKey")
+        colorspace = self._get_colorspace(
+            "defaultArnoldDriver.colorManagement"
+        )
+        beauty_products = [
+            RenderProduct(
+                productName="beauty",
+                ext=default_ext,
+                driver="defaultArnoldDriver",
+                camera=camera,
+                colorspace=colorspace
+            ) for camera in cameras
+        ]
+
+        # AOVs > Legacy > Maya Render View > Mode
+        aovs_enabled = bool(
+            self._get_attr(
+                "defaultArnoldRenderOptions.aovMode", as_string=False)
+        )
+        if not aovs_enabled:
+            return beauty_products
+
+        # Common > File Output > Merge AOVs or <RenderPass>
+        # We don't need to check for Merge AOVs due to overridden
+        # `get_renderer_prefix()` behavior which forces <renderpass>
+        has_renderpass_token = (
+            "<renderpass>" in self.layer_data.filePrefix.lower()
+        )
+        if not has_renderpass_token:
+            for product in beauty_products:
+                product.multipart = True
+            return beauty_products
+
+        # AOVs are set to be rendered separately. We should expect
+        # <renderpass> token in path.
+
+        # handle aovs from references
+        use_ref_aovs = self.render_instance.data.get(
+            "useReferencedAovs", False) or False
+
+        aovs = cmds.ls(type="aiAOV")
+        if not use_ref_aovs:
+            ref_aovs = cmds.ls(type="aiAOV", referencedNodes=True)
+            aovs = list(set(aovs) - set(ref_aovs))
+
+        products = []
+
+        # Append the AOV products
+        for aov in aovs:
+            enabled = self._get_attr(aov, "enabled")
+            if not enabled:
+                continue
+
+            # For now stick to the legacy output format.
+            aov_products = self._get_aov_render_products(aov, cameras)
+            products.extend(aov_products)
+
+        if all(product.aov != "RGBA" for product in products):
+            # Append default 'beauty' as this is arnolds default.
+            # However, it is excluded whenever a RGBA pass is enabled.
+            # For legibility add the beauty layer as first entry
+            products += beauty_products
+
+        # TODO: Output Denoising AOVs?
+
+        return products
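The branching above can be summarized with a small hedged sketch of how the AOV mode and the `<renderpass>` token decide between a single merged EXR and per-AOV products (the prefix value is hypothetical):

```python
aovs_enabled = True
file_prefix = "maya/<Scene>/<RenderLayer>/<RenderLayer>"  # no <RenderPass>

if not aovs_enabled:
    outcome = "beauty products only"
elif "<renderpass>" not in file_prefix.lower():
    # Matches the `has_renderpass_token` check: everything merges into
    # a single multipart file per camera
    outcome = "beauty products flagged multipart (merged EXR)"
else:
    outcome = "separate render product per AOV and driver"

print(outcome)  # beauty products flagged multipart (merged EXR)
```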
+
+
+class RenderProductsVray(ARenderProducts):
+    """Expected files for V-Ray renderer.
+
+    Notes:
+        - "Disabled" animation incorrectly returns frames in filename
+        - "Renumber Frames" is not supported
+
+    Reference:
+        vrayAddRenderElementImpl() in vrayCreateRenderElementsTab.mel
+
+    """
+    # todo: detect whether rendering with V-Ray GPU + whether AOV is supported
+
+    renderer = "vray"
+
+    def get_multipart(self):
+        multipart = False
+        image_format = self._get_attr("vraySettings.imageFormatStr")
+        if image_format == "exr (multichannel)":
+            multipart = True
+
+        return multipart
+
+    def get_renderer_prefix(self):
+        # type: () -> str
+        """Get image prefix for V-Ray.
+
+        This overrides :func:`ARenderProducts.get_renderer_prefix()` as
+        we must add `<aov>` token manually. This is done only for
+        non-multipart outputs, where `<aov>` token doesn't make sense.
+
+        See also:
+            :func:`ARenderProducts.get_renderer_prefix()`
+
+        """
+        prefix = super(RenderProductsVray, self).get_renderer_prefix()
+        if self.multipart:
+            return prefix
+        aov_separator = self._get_aov_separator()
+        prefix = "{}{}<aov>".format(prefix, aov_separator)
+        return prefix
+
+    def _get_aov_separator(self):
+        # type: () -> str
+        """Return the V-Ray AOV/Render Elements separator"""
+        return self._get_attr(
+            "vraySettings.fileNameRenderElementSeparator"
+        )
+
+    def _get_layer_data(self):
+        # type: () -> LayerMetadata
+        """Override to get vray specific extension."""
+        layer_data = super(RenderProductsVray, self)._get_layer_data()
+
+        default_ext = self._get_attr("vraySettings.imageFormatStr")
+        if default_ext in ["exr (multichannel)", "exr (deep)"]:
+            default_ext = "exr"
+        layer_data.defaultExt = default_ext
+        layer_data.padding = self._get_attr("vraySettings.fileNamePadding")
+
+        layer_data.aov_separator = self._get_aov_separator()
+
+        return layer_data
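For illustration, assuming a hypothetical base prefix and a `.` separator, the override above would produce the following non-multipart prefix:

```python
base_prefix = "maya/<scene>/<Layer>/<Layer>"  # hypothetical inherited prefix
aov_separator = "."  # vraySettings.fileNameRenderElementSeparator

prefix = "{}{}<aov>".format(base_prefix, aov_separator)
# 'maya/<scene>/<Layer>/<Layer>.<aov>' - V-Ray fills <aov> in per
# render element; for multipart EXR the <aov> token is omitted entirely
```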
+
+    def get_render_products(self):
+        """Get all AOVs.
+
+        See Also:
+            :func:`ARenderProducts.get_render_products()`
+
+        """
+        if not cmds.ls("vraySettings", type="VRaySettingsNode"):
+            # this occurs when the Render Settings window was not opened yet.
+            # In such case there are no VRay options created so the query
+            # for AOVs will fail. We terminate here as there are no AOVs
+            # specified then. This state will most probably fail later on
+            # some Validator anyway.
+            return []
+
+        cameras = [
+            self.sanitize_camera_name(c)
+            for c in self.get_renderable_cameras()
+        ]
+
+        image_format_str = self._get_attr("vraySettings.imageFormatStr")
+        default_ext = image_format_str
+        if default_ext in {"exr (multichannel)", "exr (deep)"}:
+            default_ext = "exr"
+
+        colorspace = lib.get_color_management_output_transform()
+        products = []
+
+        # add beauty as default when not disabled
+        dont_save_rgb = self._get_attr("vraySettings.dontSaveRgbChannel")
+        if not dont_save_rgb:
+            for camera in cameras:
+                products.append(
+                    RenderProduct(
+                        productName="",
+                        ext=default_ext,
+                        camera=camera,
+                        colorspace=colorspace,
+                        multipart=self.multipart
+                    )
+                )
+
+        # separate alpha file
+        separate_alpha = self._get_attr("vraySettings.separateAlpha")
+        if separate_alpha:
+            for camera in cameras:
+                products.append(
+                    RenderProduct(
+                        productName="Alpha",
+                        ext=default_ext,
+                        camera=camera,
+                        colorspace=colorspace,
+                        multipart=self.multipart
+                    )
+                )
+        if self.multipart:
+            # AOVs are merged in multichannel file, only main layer is
+            # rendered
+            return products
+
+        # handle aovs from references
+        use_ref_aovs = self.render_instance.data.get(
+            "useReferencedAovs", False) or False
+
+        # this will have list of all aovs no matter if they are coming from
+        # reference or not.
+        aov_types = ["VRayRenderElement", "VRayRenderElementSet"]
+        aovs = cmds.ls(type=aov_types)
+        if not use_ref_aovs:
+            ref_aovs = cmds.ls(type=aov_types, referencedNodes=True) or []
+            aovs = list(set(aovs) - set(ref_aovs))
+
+        for aov in aovs:
+            enabled = self._get_attr(aov, "enabled")
+            if not enabled:
+                continue
+
+            class_type = self._get_attr(aov + ".vrayClassType")
+            if class_type == "LightMixElement":
+                # Special case which doesn't define a name by itself but
+                # instead seems to output multiple Render Products,
+                # specifically "Self_Illumination" and "Environment"
+                product_names = ["Self_Illumination", "Environment"]
+                for camera in cameras:
+                    for name in product_names:
+                        product = RenderProduct(productName=name,
+                                                ext=default_ext,
+                                                aov=aov,
+                                                camera=camera,
+                                                colorspace=colorspace)
+                        products.append(product)
+                # Continue as we've processed this special case AOV
+                continue
+
+            aov_name = self._get_vray_aov_name(aov)
+            for camera in cameras:
+                product = RenderProduct(
+                    productName=aov_name,
+                    ext=default_ext,
+                    aov=aov,
+                    camera=camera,
+                    colorspace=colorspace
+                )
+                products.append(product)
+
+        return products
+
+    def _get_vray_aov_attr(self, node, prefix):
+        """Get value for attribute that starts with key in name
+
+        V-Ray AOVs have attribute names that include the type
+        of AOV in the attribute name, for example:
+        - vray_filename_rawdiffuse
+        - vray_filename_velocity
+        - vray_name_gi
+        - vray_explicit_name_extratex
+
+        To simplify querying the "vray_filename" or "vray_name"
+        attributes we just find the first attribute that has
+        that particular "{prefix}_" in the attribute name.
+
+        Args:
+            node (str): AOV node name
+            prefix (str): Prefix of the attribute name.
+
+        Returns:
+            Value of the attribute if it exists, else None
+
+        """
+        attrs = cmds.listAttr(node, string="{}_*".format(prefix))
+        if not attrs:
+            return None
+
+        assert len(attrs) == 1, "Found more than one attribute: %s" % attrs
+        attr = attrs[0]
+
+        return self._get_attr(node, attr)
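A small sketch of the `{prefix}_*` lookup above, with made-up attribute lists standing in for what `cmds.listAttr(node, string=...)` would return on two hypothetical render element nodes:

```python
def first_match(attrs):
    # Mirrors _get_vray_aov_attr: exactly one "{prefix}_*" attribute is
    # expected; its value would then be read from the node
    if not attrs:
        return None
    assert len(attrs) == 1, "Found more than one attribute: %s" % attrs
    return attrs[0]

print(first_match(["vray_filename_velocity"]))  # vray_filename_velocity
print(first_match([]))                          # None
```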
+
+    def _get_vray_aov_name(self, node):
+        """Get AOV name from V-Ray.
+
+        Args:
+            node (str): aov node name.
+
+        Returns:
+            str: aov name.
+
+        """
+
+        vray_explicit_name = self._get_vray_aov_attr(node,
+                                                     "vray_explicit_name")
+        vray_filename = self._get_vray_aov_attr(node, "vray_filename")
+        vray_name = self._get_vray_aov_attr(node, "vray_name")
+        final_name = vray_explicit_name or vray_filename or vray_name or None
+
+        class_type = self._get_attr(node, "vrayClassType")
+        if not vray_explicit_name:
+            # Explicit name takes precedence and overrides completely
+            # otherwise add the connected node names to the special cases
+            # Any namespace colon ':' gets replaced to underscore '_'
+            # so we sanitize using `sanitize_camera_name`
+            def _get_source_name(node, attr):
+                """Return sanitized name of input connection to attribute"""
+                plug = "{}.{}".format(node, attr)
+                connections = cmds.listConnections(plug,
+                                                   source=True,
+                                                   destination=False)
+                if connections:
+                    return self.sanitize_camera_name(connections[0])
+
+            if class_type == "MaterialSelectElement":
+                # Name suffix is based on the connected material or set
+                attrs = [
+                    "vray_mtllist_mtlselect",
+                    "vray_mtl_mtlselect"
+                ]
+                for attribute in attrs:
+                    name = _get_source_name(node, attribute)
+                    if name:
+                        final_name += '_{}'.format(name)
+                        break
+                else:
+                    log.warning("Material Select Element has no "
+                                "selected materials: %s", node)
+
+            elif class_type == "ExtraTexElement":
+                # Name suffix is based on the connected textures
+                extratex_type = self._get_attr(node, "vray_type_extratex")
+                attr = {
+                    0: "vray_texture_extratex",
+                    1: "vray_float_texture_extratex",
+                    2: "vray_int_texture_extratex",
+                }.get(extratex_type)
+                name = _get_source_name(node, attr)
+                if name:
+                    final_name += '_{}'.format(name)
+                else:
+                    log.warning("Extratex Element has no incoming texture")
+
+        assert final_name, "Output filename not defined for AOV: %s" % node
+
+        return final_name
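The name resolution order implemented above can be shown with hypothetical values queried from a single AOV node; the explicit name wins, then the filename, then the plain name:

```python
vray_explicit_name = None     # vray_explicit_name_* attribute
vray_filename = "rawGI"       # vray_filename_* attribute
vray_name = "gi"              # vray_name_* attribute

final_name = vray_explicit_name or vray_filename or vray_name or None
# 'rawGI' - and special cases like MaterialSelectElement then append
# a suffix such as '_myMaterial' based on the connected node
```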
+
+
+class RenderProductsRedshift(ARenderProducts):
+    """Expected files for Redshift renderer.
+
+    Notes:
+        - `get_files()` only supports rendering with frames, like "animation"
+
+    Attributes:
+
+        unmerged_aovs (list): Name of aovs that are not merged into resulting
+            exr and we need them specified in Render Products output.
+
+    """
+
+    renderer = "redshift"
+    unmerged_aovs = {"Cryptomatte"}
+
+    def get_files(self, product):
+        # When outputting AOVs we need to replace Redshift specific AOV tokens
+        # with Maya render tokens for generating file sequences. We validate
+        # to a specific AOV fileprefix so we only need to account for one
+        # replacement.
+        if not product.multipart and product.driver:
+            file_prefix = self._get_attr(product.driver + ".filePrefix")
+            self.layer_data.filePrefix = file_prefix.replace(
+                "<BeautyPath>/<BeautyFile>",
+                "<Scene>/<RenderLayer>/<RenderLayer>"
+            )
+
+        return super(RenderProductsRedshift, self).get_files(product)
+
+    def get_multipart(self):
+        # For Redshift we don't directly return upon forcing multilayer
+        # due to some AOVs still being written into separate files,
+        # like Cryptomatte.
+        # AOVs are merged in multi-channel file
+        multipart = False
+        force_layer = bool(
+            self._get_attr("redshiftOptions.exrForceMultilayer")
+        )
+        if force_layer:
+            multipart = True
+
+        return multipart
+
+    def get_renderer_prefix(self):
+        """Get image prefix for Redshift.
+
+        This overrides :func:`ARenderProducts.get_renderer_prefix()` as
+        we must add `<aov>` token manually. This is done only for
+        non-multipart outputs, where `<aov>` token doesn't make sense.
+
+        See also:
+            :func:`ARenderProducts.get_renderer_prefix()`
+
+        """
+        prefix = super(RenderProductsRedshift, self).get_renderer_prefix()
+        if self.multipart:
+            return prefix
+        separator = self.extract_separator(prefix)
+        prefix = "{}{}<aov>".format(prefix, separator or "_")
+        return prefix
+
+    def get_render_products(self):
+        """Get all AOVs.
+
+        See Also:
+            :func:`ARenderProducts.get_render_products()`
+
+        """
+
+        if not cmds.ls("redshiftOptions", type="RedshiftOptions"):
+            # this occurs when the Render Settings window was not opened yet.
+            # In such case there are no Redshift options created so the query
+            # for AOVs will fail. We terminate here as there are no AOVs
+            # specified then. This state will most probably fail later on
+            # some Validator anyway.
+            return []
+
+        cameras = [
+            self.sanitize_camera_name(c)
+            for c in self.get_renderable_cameras()
+        ]
+
+        # Get Redshift Extension from image format
+        image_format = self._get_attr("redshiftOptions.imageFormat")  # integer
+        ext = mel.eval("redshiftGetImageExtension(%i)" % image_format)
+
+        use_ref_aovs = self.render_instance.data.get(
+            "useReferencedAovs", False) or False
+
+        aovs = cmds.ls(type="RedshiftAOV")
+        if not use_ref_aovs:
+            ref_aovs = cmds.ls(type="RedshiftAOV", referencedNodes=True)
+            aovs = list(set(aovs) - set(ref_aovs))
+
+        products = []
+        global_aov_enabled = bool(
+            self._get_attr(
+                "redshiftOptions.aovGlobalEnableMode", as_string=False)
+        )
+        colorspace = lib.get_color_management_output_transform()
+        if not global_aov_enabled:
+            # only beauty output
+            for camera in cameras:
+                products.insert(0,
+                                RenderProduct(productName="",
+                                              ext=ext,
+                                              multipart=self.multipart,
+                                              camera=camera,
+                                              colorspace=colorspace))
+            return products
+
+        light_groups_enabled = False
+        has_beauty_aov = False
+
+        for aov in aovs:
+            enabled = self._get_attr(aov, "enabled")
+            if not enabled:
+                continue
+
+            aov_type = self._get_attr(aov, "aovType")
+            if self.multipart and aov_type not in self.unmerged_aovs:
+                continue
+
+            # Any AOVs that still get processed, like Cryptomatte
+            # by themselves are not multipart files.
+
+            # Redshift skips rendering of masterlayer without AOV suffix
+            # when a Beauty AOV is rendered. It overrides the main layer.
+ if aov_type == "Beauty": + has_beauty_aov = True + + aov_name = self._get_attr(aov, "name") + + # Support light Groups + light_groups = [] + if self._get_attr(aov, "supportsLightGroups"): + all_light_groups = self._get_attr(aov, "allLightGroups") + if all_light_groups: + # All light groups is enabled + light_groups = self._get_redshift_light_groups() + else: + value = self._get_attr(aov, "lightGroupList") + # note: string value can return None when never set + if value: + selected_light_groups = value.strip().split() + light_groups = selected_light_groups + + for light_group in light_groups: + aov_light_group_name = "{}_{}".format(aov_name, + light_group) + for camera in cameras: + product = RenderProduct( + productName=aov_light_group_name, + aov=aov_name, + ext=ext, + multipart=False, + camera=camera, + driver=aov, + colorspace=colorspace) + products.append(product) + + if light_groups: + light_groups_enabled = True + + # Redshift AOV Light Select always renders the global AOV + # even when light groups are present so we don't need to + # exclude it when light groups are active + for camera in cameras: + product = RenderProduct(productName=aov_name, + aov=aov_name, + ext=ext, + multipart=False, + camera=camera, + driver=aov, + colorspace=colorspace) + products.append(product) + + # When a Beauty AOV is added manually, it will be rendered as + # 'Beauty_other' in file name and "standard" beauty will have + # 'Beauty' in its name. When disabled, standard output will be + # without `Beauty`. Except when using light groups. + if light_groups_enabled: + return products + + beauty_name = "BeautyAux" if has_beauty_aov else "" + for camera in cameras: + products.insert(0, + RenderProduct(productName=beauty_name, + ext=ext, + multipart=self.multipart, + camera=camera, + colorspace=colorspace)) + + return products + + @staticmethod + def _get_redshift_light_groups(): + return sorted(mel.eval("redshiftAllAovLightGroups")) + + +class RenderProductsRenderman(ARenderProducts): + """Expected files for Renderman renderer. + + Warning: + This is very rudimentary and needs more love and testing. + """ + + renderer = "renderman" + unmerged_aovs = {"PxrCryptomatte"} + + def get_multipart(self): + # Implemented as display specific in "get_render_products". + return False + + def get_render_products(self): + """Get all AOVs. + + See Also: + :func:`ARenderProducts.get_render_products()` + + """ + from rfm2.api.displays import get_displays # noqa + + colorspace = lib.get_color_management_output_transform() + + cameras = [ + self.sanitize_camera_name(c) + for c in self.get_renderable_cameras() + ] + + if not cameras: + cameras = [ + self.sanitize_camera_name( + self.get_renderable_cameras()[0]) + ] + products = [] + + # NOTE: This is guessing extensions from renderman display types. + # Some of them are just framebuffers, d_texture format can be + # set in display setting. We set those now to None, but it + # should be handled more gracefully. + display_types = { + "d_deepexr": "exr", + "d_it": None, + "d_null": None, + "d_openexr": "exr", + "d_png": "png", + "d_pointcloud": "ptc", + "d_targa": "tga", + "d_texture": None, + "d_tiff": "tif" + } + + displays = get_displays(override_dst="render")["displays"] + for name, display in displays.items(): + enabled = display["params"]["enable"]["value"] + if not enabled: + continue + + # Skip display types not producing any file output. + # Is there a better way to do it? 
+            if not display_types.get(display["driverNode"]["type"]):
+                continue
+
+            has_cryptomatte = cmds.ls(type=self.unmerged_aovs)
+            matte_enabled = False
+            if has_cryptomatte:
+                for cryptomatte in has_cryptomatte:
+                    cryptomatte_aov = cryptomatte
+                    matte_name = "cryptomatte"
+                    rman_globals = cmds.listConnections(cryptomatte +
+                                                        ".message")
+                    if rman_globals:
+                        matte_enabled = True
+
+            aov_name = name
+            if aov_name == "rmanDefaultDisplay":
+                aov_name = "beauty"
+
+            extensions = display_types.get(
+                display["driverNode"]["type"], "exr")
+
+            for camera in cameras:
+                # Create render product and set it as multipart only on
+                # display types supporting it. In all other cases, Renderman
+                # will create separate output per channel.
+                if display["driverNode"]["type"] in ["d_openexr", "d_deepexr", "d_tiff"]:  # noqa
+                    product = RenderProduct(
+                        productName=aov_name,
+                        ext=extensions,
+                        camera=camera,
+                        multipart=True,
+                        colorspace=colorspace
+                    )
+
+                    if has_cryptomatte and matte_enabled:
+                        cryptomatte = RenderProduct(
+                            productName=matte_name,
+                            aov=cryptomatte_aov,
+                            ext=extensions,
+                            camera=camera,
+                            multipart=True,
+                            colorspace=colorspace
+                        )
+                else:
+                    # this code should handle the case where no multipart
+                    # capable format is selected. But since it involves
+                    # shady logic to determine what channel become what
+                    # lets not do that as all productions will use exr anyway.
+                    """
+                    for channel in display['params']['displayChannels']['value']:  # noqa
+                        product = RenderProduct(
+                            productName="{}_{}".format(aov_name, channel),
+                            ext=extensions,
+                            camera=camera,
+                            multipart=False
+                        )
+                    """
+                    raise UnsupportedImageFormatException(
+                        "Only exr, deep exr and tiff formats are supported.")
+
+                products.append(product)
+
+            if has_cryptomatte and matte_enabled:
+                products.append(cryptomatte)
+
+        return products
+
+    def get_files(self, product):
+        """Get expected files.
+
+        """
+        files = super(RenderProductsRenderman, self).get_files(product)
+
+        layer_data = self.layer_data
+        new_files = []
+
+        resolved_image_dir = re.sub("<scene>", layer_data.sceneName, RENDERMAN_IMAGE_DIR, flags=re.IGNORECASE)  # noqa: E501
+        resolved_image_dir = re.sub("<layer>", layer_data.layerName, resolved_image_dir, flags=re.IGNORECASE)  # noqa: E501
+        for file in files:
+            new_file = "{}/{}".format(resolved_image_dir, file)
+            new_files.append(new_file)
+
+        return new_files
+
+
+class RenderProductsMayaHardware(ARenderProducts):
+    """Expected files for MayaHardware renderer."""
+
+    renderer = "mayahardware2"
+
+    extensions = [
+        {"label": "JPEG", "index": 8, "extension": "jpg"},
+        {"label": "PNG", "index": 32, "extension": "png"},
+        {"label": "EXR(exr)", "index": 40, "extension": "exr"}
+    ]
+
+    def get_multipart(self):
+        # MayaHardware does not support multipart EXRs.
+        return False
+
+    def _get_extension(self, value):
+        result = None
+        if isinstance(value, int):
+            extensions = {
+                extension["index"]: extension["extension"]
+                for extension in self.extensions
+            }
+            try:
+                result = extensions[value]
+            except KeyError:
+                raise NotImplementedError(
+                    "Could not find extension for {}".format(value)
+                )
+
+        if isinstance(value, six.string_types):
+            extensions = {
+                extension["label"]: extension["extension"]
+                for extension in self.extensions
+            }
+            try:
+                result = extensions[value]
+            except KeyError:
+                raise NotImplementedError(
+                    "Could not find extension for {}".format(value)
+                )
+
+        if not result:
+            raise NotImplementedError(
+                "Could not find extension for {}".format(value)
+            )
+
+        return result
+
+    def get_render_products(self):
+        """Get all AOVs.
+ See Also: + :func:`ARenderProducts.get_render_products()` + """ + ext = self._get_extension( + self._get_attr("defaultRenderGlobals.imageFormat") + ) + + products = [] + for cam in self.get_renderable_cameras(): + product = RenderProduct( + productName="beauty", + ext=ext, + camera=cam, + colorspace=lib.get_color_management_output_transform() + ) + products.append(product) + + return products + + +class AOVError(Exception): + """Custom exception for determining AOVs.""" + + +class UnsupportedRendererException(Exception): + """Custom exception. + + Raised when requesting data from unsupported renderer. + """ + + +class UnsupportedImageFormatException(Exception): + """Custom exception to report unsupported output image format.""" diff --git a/client/ayon_maya/api/lib_rendersettings.py b/client/ayon_maya/api/lib_rendersettings.py new file mode 100644 index 00000000..f7f3f1d7 --- /dev/null +++ b/client/ayon_maya/api/lib_rendersettings.py @@ -0,0 +1,410 @@ +# -*- coding: utf-8 -*- +"""Class for handling Render Settings.""" +import six +import sys + +from ayon_core.lib import Logger +from ayon_core.settings import get_project_settings + +from ayon_core.pipeline import CreatorError, get_current_project_name +from ayon_core.pipeline.context_tools import get_current_folder_entity +from ayon_maya.api.lib import reset_frame_range + + +class RenderSettings(object): + + _image_prefix_nodes = { + 'vray': 'vraySettings.fileNamePrefix', + 'arnold': 'defaultRenderGlobals.imageFilePrefix', + 'renderman': 'rmanGlobals.imageFileFormat', + 'redshift': 'defaultRenderGlobals.imageFilePrefix', + 'mayahardware2': 'defaultRenderGlobals.imageFilePrefix' + } + + _aov_chars = { + "dot": ".", + "dash": "-", + "underscore": "_" + } + + log = Logger.get_logger("RenderSettings") + + @classmethod + def get_image_prefix_attr(cls, renderer): + return cls._image_prefix_nodes[renderer] + + @staticmethod + def get_padding_attr(renderer): + """Return attribute for renderer that defines frame padding amount""" + if renderer == "vray": + return "vraySettings.fileNamePadding" + else: + return "defaultRenderGlobals.extensionPadding" + + def __init__(self, project_settings=None): + if not project_settings: + project_settings = get_project_settings( + get_current_project_name() + ) + render_settings = project_settings["maya"]["render_settings"] + image_prefixes = { + "vray": render_settings["vray_renderer"]["image_prefix"], + "arnold": render_settings["arnold_renderer"]["image_prefix"], + "renderman": render_settings["renderman_renderer"]["image_prefix"], + "redshift": render_settings["redshift_renderer"]["image_prefix"] + } + + # TODO probably should be stored to more explicit attribute + # Renderman only + renderman_settings = render_settings["renderman_renderer"] + _image_dir = { + "renderman": renderman_settings["image_dir"], + "cryptomatte": renderman_settings["cryptomatte_dir"], + "imageDisplay": renderman_settings["imageDisplay_dir"], + "watermark": renderman_settings["watermark_dir"] + } + self._image_prefixes = image_prefixes + self._image_dir = _image_dir + self._project_settings = project_settings + + def set_default_renderer_settings(self, renderer=None): + """Set basic settings based on renderer.""" + # Not all hosts can import this module. 
+ from maya import cmds # noqa: F401 + import maya.mel as mel # noqa: F401 + + if not renderer: + renderer = cmds.getAttr( + 'defaultRenderGlobals.currentRenderer').lower() + + folder_entity = get_current_folder_entity() + folder_attributes = folder_entity["attrib"] + # project_settings/maya/create/CreateRender/aov_separator + try: + aov_separator = self._aov_chars[( + self._project_settings["maya"] + ["render_settings"] + ["aov_separator"] + )] + except KeyError: + aov_separator = "_" + reset_frame = self._project_settings["maya"]["render_settings"]["reset_current_frame"] # noqa + + if reset_frame: + start_frame = cmds.getAttr("defaultRenderGlobals.startFrame") + cmds.currentTime(start_frame, edit=True) + + if renderer in self._image_prefix_nodes: + prefix = self._image_prefixes[renderer] + prefix = prefix.replace("{aov_separator}", aov_separator) + cmds.setAttr(self._image_prefix_nodes[renderer], + prefix, type="string") # noqa + else: + print("{0} isn't a supported renderer to autoset settings.".format(renderer)) # noqa + # TODO: handle not having res values in the doc + width = folder_attributes.get("resolutionWidth") + height = folder_attributes.get("resolutionHeight") + + if renderer == "arnold": + # set renderer settings for Arnold from project settings + self._set_arnold_settings(width, height) + + if renderer == "vray": + self._set_vray_settings(aov_separator, width, height) + + if renderer == "redshift": + self._set_redshift_settings(width, height) + mel.eval("redshiftUpdateActiveAovList") + + if renderer == "renderman": + image_dir = self._image_dir["renderman"] + cmds.setAttr("rmanGlobals.imageOutputDir", + image_dir, type="string") + self._set_renderman_settings(width, height, + aov_separator) + + def _set_arnold_settings(self, width, height): + """Sets settings for Arnold.""" + from mtoa.core import createOptions # noqa + from mtoa.aovs import AOVInterface # noqa + # Not all hosts can import this module. + from maya import cmds # noqa: F401 + import maya.mel as mel # noqa: F401 + + createOptions() + render_settings = self._project_settings["maya"]["render_settings"] + arnold_render_presets = render_settings["arnold_renderer"] # noqa + # Force resetting settings and AOV list to avoid having to deal with + # AOV checking logic, for now. + # This is a work around because the standard + # function to revert render settings does not reset AOVs list in MtoA + # Fetch current aovs in case there's any. 
+ current_aovs = AOVInterface().getAOVs() + remove_aovs = render_settings["remove_aovs"] + if remove_aovs: + # Remove fetched AOVs + AOVInterface().removeAOVs(current_aovs) + mel.eval("unifiedRenderGlobalsRevertToDefault") + img_ext = arnold_render_presets["image_format"] + img_prefix = arnold_render_presets["image_prefix"] + aovs = arnold_render_presets["aov_list"] + img_tiled = arnold_render_presets["tiled"] + multi_exr = arnold_render_presets["multilayer_exr"] + additional_options = arnold_render_presets["additional_options"] + for aov in aovs: + if aov in current_aovs and not remove_aovs: + continue + AOVInterface('defaultArnoldRenderOptions').addAOV(aov) + + cmds.setAttr("defaultResolution.width", width) + cmds.setAttr("defaultResolution.height", height) + + self._set_global_output_settings() + + cmds.setAttr( + "defaultRenderGlobals.imageFilePrefix", img_prefix, type="string") + + cmds.setAttr( + "defaultArnoldDriver.ai_translator", img_ext, type="string") + + cmds.setAttr( + "defaultArnoldDriver.exrTiled", img_tiled) + + cmds.setAttr( + "defaultArnoldDriver.mergeAOVs", multi_exr) + self._additional_attribs_setter(additional_options) + reset_frame_range(playback=False, fps=False, render=True) + + def _set_redshift_settings(self, width, height): + """Sets settings for Redshift.""" + # Not all hosts can import this module. + from maya import cmds # noqa: F401 + import maya.mel as mel # noqa: F401 + + render_settings = self._project_settings["maya"]["render_settings"] + redshift_render_presets = render_settings["redshift_renderer"] + + remove_aovs = render_settings["remove_aovs"] + all_rs_aovs = cmds.ls(type='RedshiftAOV') + if remove_aovs: + for aov in all_rs_aovs: + enabled = cmds.getAttr("{}.enabled".format(aov)) + if enabled: + cmds.delete(aov) + + redshift_aovs = redshift_render_presets["aov_list"] + # list all the aovs + all_rs_aovs = cmds.ls(type='RedshiftAOV') + for rs_aov in redshift_aovs: + rs_layername = "rsAov_{}".format(rs_aov.replace(" ", "")) + if rs_layername in all_rs_aovs: + continue + cmds.rsCreateAov(type=rs_aov) + # update the AOV list + mel.eval("redshiftUpdateActiveAovList") + + rs_p_engine = redshift_render_presets["primary_gi_engine"] + rs_s_engine = redshift_render_presets["secondary_gi_engine"] + + if int(rs_p_engine) or int(rs_s_engine) != 0: + cmds.setAttr("redshiftOptions.GIEnabled", 1) + if int(rs_p_engine) == 0: + # reset the primary GI Engine as default + cmds.setAttr("redshiftOptions.primaryGIEngine", 4) + if int(rs_s_engine) == 0: + # reset the secondary GI Engine as default + cmds.setAttr("redshiftOptions.secondaryGIEngine", 2) + else: + cmds.setAttr("redshiftOptions.GIEnabled", 0) + + cmds.setAttr("redshiftOptions.primaryGIEngine", int(rs_p_engine)) + cmds.setAttr("redshiftOptions.secondaryGIEngine", int(rs_s_engine)) + + additional_options = redshift_render_presets["additional_options"] + ext = redshift_render_presets["image_format"] + img_exts = ["iff", "exr", "tif", "png", "tga", "jpg"] + img_ext = img_exts.index(ext) + + self._set_global_output_settings() + cmds.setAttr("redshiftOptions.imageFormat", img_ext) + cmds.setAttr("defaultResolution.width", width) + cmds.setAttr("defaultResolution.height", height) + self._additional_attribs_setter(additional_options) + + def _set_renderman_settings(self, width, height, aov_separator): + """Sets settings for Renderman""" + # Not all hosts can import this module. 
+        from maya import cmds  # noqa: F401
+        import maya.mel as mel  # noqa: F401
+
+        rman_render_presets = (
+            self._project_settings
+            ["maya"]
+            ["render_settings"]
+            ["renderman_renderer"]
+        )
+        display_filters = rman_render_presets["display_filters"]
+        d_filters_number = len(display_filters)
+        for i in range(d_filters_number):
+            d_node = cmds.ls(typ=display_filters[i])
+            if len(d_node) > 0:
+                filter_nodes = d_node[0]
+            else:
+                filter_nodes = cmds.createNode(display_filters[i])
+
+            cmds.connectAttr(filter_nodes + ".message",
+                             "rmanGlobals.displayFilters[%i]" % i,
+                             force=True)
+            if filter_nodes.startswith("PxrImageDisplayFilter"):
+                imageDisplay_dir = self._image_dir["imageDisplay"]
+                imageDisplay_dir = imageDisplay_dir.replace("{aov_separator}",
+                                                            aov_separator)
+                cmds.setAttr(filter_nodes + ".filename",
+                             imageDisplay_dir, type="string")
+
+        sample_filters = rman_render_presets["sample_filters"]
+        s_filters_number = len(sample_filters)
+        for n in range(s_filters_number):
+            s_node = cmds.ls(typ=sample_filters[n])
+            if len(s_node) > 0:
+                filter_nodes = s_node[0]
+            else:
+                filter_nodes = cmds.createNode(sample_filters[n])
+
+            cmds.connectAttr(filter_nodes + ".message",
+                             "rmanGlobals.sampleFilters[%i]" % n,
+                             force=True)
+
+            if filter_nodes.startswith("PxrCryptomatte"):
+                matte_dir = self._image_dir["cryptomatte"]
+                matte_dir = matte_dir.replace("{aov_separator}",
+                                              aov_separator)
+                cmds.setAttr(filter_nodes + ".filename",
+                             matte_dir, type="string")
+            elif filter_nodes.startswith("PxrWatermarkFilter"):
+                watermark_dir = self._image_dir["watermark"]
+                watermark_dir = watermark_dir.replace("{aov_separator}",
+                                                      aov_separator)
+                cmds.setAttr(filter_nodes + ".filename",
+                             watermark_dir, type="string")
+
+        additional_options = rman_render_presets["additional_options"]
+
+        self._set_global_output_settings()
+        cmds.setAttr("defaultResolution.width", width)
+        cmds.setAttr("defaultResolution.height", height)
+        self._additional_attribs_setter(additional_options)
+
+    def _set_vray_settings(self, aov_separator, width, height):
+        # type: (str, int, int) -> None
+        """Sets important settings for Vray."""
+        # Not all hosts can import this module.
+        from maya import cmds  # noqa: F401
+        import maya.mel as mel  # noqa: F401
+
+        settings = cmds.ls(type="VRaySettingsNode")
+        node = settings[0] if settings else cmds.createNode("VRaySettingsNode")
+        render_settings = self._project_settings["maya"]["render_settings"]
+        vray_render_presets = render_settings["vray_renderer"]
+        # vrayRenderElement
+        remove_aovs = render_settings["remove_aovs"]
+        all_vray_aovs = cmds.ls(type='VRayRenderElement')
+        lightSelect_aovs = cmds.ls(type='VRayRenderElementSet')
+        if remove_aovs:
+            for aov in all_vray_aovs:
+                # remove all aovs except LightSelect
+                enabled = cmds.getAttr("{}.enabled".format(aov))
+                if enabled:
+                    cmds.delete(aov)
+            # remove LightSelect
+            for light_aovs in lightSelect_aovs:
+                light_enabled = cmds.getAttr("{}.enabled".format(light_aovs))
+                if light_enabled:
+                    cmds.delete(light_aovs)
+
+        vray_aovs = vray_render_presets["aov_list"]
+        for renderlayer in vray_aovs:
+            renderElement = "vrayAddRenderElement {}".format(renderlayer)
+            RE_name = mel.eval(renderElement)
+            # if there is more than one same render element
+            if RE_name.endswith("1"):
+                cmds.delete(RE_name)
+        # Set aov separator
+        # First we need to explicitly set the UI items in Render Settings
+        # because that is also what V-Ray updates to when that Render Settings
+        # UI did initialize before and refreshes again.
+ MENU = "vrayRenderElementSeparator" + if cmds.optionMenuGrp(MENU, query=True, exists=True): + items = cmds.optionMenuGrp(MENU, query=True, ill=True) + separators = [cmds.menuItem(i, query=True, label=True) for i in items] # noqa: E501 + try: + sep_idx = separators.index(aov_separator) + except ValueError: + six.reraise( + CreatorError, + CreatorError( + "AOV character {} not in {}".format( + aov_separator, separators)), + sys.exc_info()[2]) + + cmds.optionMenuGrp(MENU, edit=True, select=sep_idx + 1) + + # Set the render element attribute as string. This is also what V-Ray + # sets whenever the `vrayRenderElementSeparator` menu items switch + cmds.setAttr( + "{}.fileNameRenderElementSeparator".format(node), + aov_separator, + type="string" + ) + + # Set render file format to exr + ext = vray_render_presets["image_format"] + cmds.setAttr("{}.imageFormatStr".format(node), ext, type="string") + + # animType + cmds.setAttr("{}.animType".format(node), 1) + + # resolution + cmds.setAttr("{}.width".format(node), width) + cmds.setAttr("{}.height".format(node), height) + + additional_options = vray_render_presets["additional_options"] + + self._additional_attribs_setter(additional_options) + + @staticmethod + def _set_global_output_settings(): + # Not all hosts can import this module. + from maya import cmds # noqa: F401 + import maya.mel as mel # noqa: F401 + + # enable animation + cmds.setAttr("defaultRenderGlobals.outFormatControl", 0) + cmds.setAttr("defaultRenderGlobals.animation", 1) + cmds.setAttr("defaultRenderGlobals.putFrameBeforeExt", 1) + cmds.setAttr("defaultRenderGlobals.extensionPadding", 4) + + def _additional_attribs_setter(self, additional_attribs): + # Not all hosts can import this module. + from maya import cmds # noqa: F401 + import maya.mel as mel # noqa: F401 + + for item in additional_attribs: + attribute = item["attribute"] + value = item["value"] + attribute = str(attribute) # ensure str conversion from settings + attribute_type = cmds.getAttr(attribute, type=True) + if attribute_type in {"long", "bool"}: + cmds.setAttr(attribute, int(value)) + elif attribute_type == "string": + cmds.setAttr(attribute, str(value), type="string") + elif attribute_type in {"double", "doubleAngle", "doubleLinear"}: + cmds.setAttr(attribute, float(value)) + else: + self.log.error( + "Attribute {attribute} can not be set due to unsupported " + "type: {attribute_type}".format( + attribute=attribute, + attribute_type=attribute_type) + ) diff --git a/client/ayon_maya/api/lib_rendersetup.py b/client/ayon_maya/api/lib_rendersetup.py new file mode 100644 index 00000000..d93e6af0 --- /dev/null +++ b/client/ayon_maya/api/lib_rendersetup.py @@ -0,0 +1,417 @@ +# -*- coding: utf-8 -*- +"""Code to get attributes from render layer without switching to it. + +https://github.com/Colorbleed/colorbleed-config/blob/acre/colorbleed/maya/lib_rendersetup.py +Credits: Roy Nieterau (BigRoy) / Colorbleed +Modified for use in AYON + +""" + +from maya import cmds +import maya.api.OpenMaya as om +import logging + +import maya.app.renderSetup.model.utils as utils +from maya.app.renderSetup.model import renderSetup +from maya.app.renderSetup.model.override import ( + AbsOverride, + RelOverride, + UniqueOverride +) + +from ayon_maya.api.lib import get_attribute + +EXACT_MATCH = 0 +PARENT_MATCH = 1 +CLIENT_MATCH = 2 + +DEFAULT_RENDER_LAYER = "defaultRenderLayer" + +log = logging.getLogger(__name__) + + +def get_rendersetup_layer(layer): + """Return render setup layer name. 
+ + This also converts names from legacy renderLayer node name to render setup + name. + + Note: `defaultRenderLayer` is not a renderSetupLayer node but it is however + the valid layer name for Render Setup - so we return that as is. + + Example: + >>> for legacy_layer in cmds.ls(type="renderLayer"): + >>> layer = get_rendersetup_layer(legacy_layer) + + Returns: + str or None: Returns renderSetupLayer node name if `layer` is a valid + layer name in legacy renderlayers or render setup layers. + Returns None if the layer can't be found or Render Setup is + currently disabled. + + + """ + if layer == DEFAULT_RENDER_LAYER: + # defaultRenderLayer doesn't have a `renderSetupLayer` + return layer + + if not cmds.mayaHasRenderSetup(): + return None + + if not cmds.objExists(layer): + return None + + if cmds.nodeType(layer) == "renderSetupLayer": + return layer + + # By default Render Setup renames the legacy renderlayer + # to `rs_` but lets not rely on that as the + # layer node can be renamed manually + connections = cmds.listConnections(layer + ".message", + type="renderSetupLayer", + exactType=True, + source=False, + destination=True, + plugs=True) or [] + return next((conn.split(".", 1)[0] for conn in connections + if conn.endswith(".legacyRenderLayer")), None) + + +def get_attr_in_layer(node_attr, layer, as_string=True): + """Return attribute value in Render Setup layer. + + This will only work for attributes which can be + retrieved with `maya.cmds.getAttr` and for which + Relative and Absolute overrides are applicable. + + Examples: + >>> get_attr_in_layer("defaultResolution.width", layer="layer1") + >>> get_attr_in_layer("defaultRenderGlobals.startFrame", layer="layer") + >>> get_attr_in_layer("transform.translate", layer="layer3") + + Args: + attr (str): attribute name as 'node.attribute' + layer (str): layer name + + Returns: + object: attribute value in layer + + """ + + def _layer_needs_update(layer): + """Return whether layer needs updating.""" + # Use `getattr` as e.g. DEFAULT_RENDER_LAYER does not have + # the attribute + return getattr(layer, "needsMembershipUpdate", False) or \ + getattr(layer, "needsApplyUpdate", False) + + def get_default_layer_value(node_attr_): + """Return attribute value in `DEFAULT_RENDER_LAYER`.""" + inputs = cmds.listConnections(node_attr_, + source=True, + destination=False, + # We want to skip conversion nodes since + # an override to `endFrame` could have + # a `unitToTimeConversion` node + # in-between + skipConversionNodes=True, + type="applyOverride") or [] + if inputs: + override = inputs[0] + history_overrides = cmds.ls(cmds.listHistory(override, + pruneDagObjects=True), + type="applyOverride") + node = history_overrides[-1] if history_overrides else override + node_attr_ = node + ".original" + + return get_attribute(node_attr_, asString=as_string) + + layer = get_rendersetup_layer(layer) + rs = renderSetup.instance() + current_layer = rs.getVisibleRenderLayer() + if current_layer.name() == layer: + + # Ensure layer is up-to-date + if _layer_needs_update(current_layer): + try: + rs.switchToLayer(current_layer) + except RuntimeError: + # Some cases can cause errors on switching + # the first time with Render Setup layers + # e.g. different overrides to compounds + # and its children plugs. So we just force + # it another time. If it then still fails + # we will let it error out. 
+                    rs.switchToLayer(current_layer)
+
+        return get_attribute(node_attr, asString=as_string)
+
+    overrides = get_attr_overrides(node_attr, layer)
+    default_layer_value = get_default_layer_value(node_attr)
+    if not overrides:
+        return default_layer_value
+
+    value = default_layer_value
+    for match, layer_override, index in overrides:
+        if isinstance(layer_override, AbsOverride):
+            # Absolute override
+            value = get_attribute(layer_override.name() + ".attrValue")
+            if match == EXACT_MATCH:
+                # value = value
+                pass
+            elif match == PARENT_MATCH:
+                value = value[index]
+            elif match == CLIENT_MATCH:
+                value[index] = value
+
+        elif isinstance(layer_override, RelOverride):
+            # Relative override
+            # Value = Original * Multiply + Offset
+            multiply = get_attribute(layer_override.name() + ".multiply")
+            offset = get_attribute(layer_override.name() + ".offset")
+
+            if match == EXACT_MATCH:
+                value = value * multiply + offset
+            elif match == PARENT_MATCH:
+                value = value * multiply[index] + offset[index]
+            elif match == CLIENT_MATCH:
+                value[index] = value[index] * multiply + offset
+
+        else:
+            raise TypeError("Unsupported override: %s" % layer_override)
+
+    return value
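To make the override math above concrete, a worked example with hypothetical numbers for a relative override on `defaultResolution.width`:

```python
original = 1920          # value in the default render layer
multiply, offset = 0.5, 10.0

# Relative override: Value = Original * Multiply + Offset
value = original * multiply + offset
# 970.0 - what get_attr_in_layer() would report for that layer
```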
+
+
+def get_attr_overrides(node_attr, layer,
+                       skip_disabled=True,
+                       skip_local_render=True,
+                       stop_at_absolute_override=True):
+    """Return all Overrides applicable to the attribute.
+
+    Overrides are returned as a 3-tuple:
+        (Match, Override, Index)
+
+    Match:
+        This is any of EXACT_MATCH, PARENT_MATCH, CLIENT_MATCH
+        and defines whether the override is exactly on the
+        plug, on the parent or on a child plug.
+
+    Override:
+        This is the RenderSetup Override instance.
+
+    Index:
+        This is the Plug index under the parent or for
+        the child that matches. The EXACT_MATCH index will
+        always be None. For PARENT_MATCH the index is which
+        index the plug is under the parent plug. For CLIENT_MATCH
+        the index is which child index matches the plug.
+
+    Args:
+        node_attr (str): attribute name as 'node.attribute'
+        layer (str): layer name
+        skip_disabled (bool): exclude disabled overrides
+        skip_local_render (bool): exclude overrides marked
+            as local render.
+        stop_at_absolute_override: exclude overrides prior
+            to the last absolute override as they have
+            no influence on the resulting value.
+
+    Returns:
+        list: Ordered Overrides in order of strength
+
+    """
+
+    def get_mplug_children(plug):
+        """Return children MPlugs of compound `MPlug`."""
+        children = []
+        if plug.isCompound:
+            for i in range(plug.numChildren()):
+                children.append(plug.child(i))
+        return children
+
+    def get_mplug_names(mplug):
+        """Return long and short name of `MPlug`."""
+        long_name = mplug.partialName(useLongNames=True)
+        short_name = mplug.partialName(useLongNames=False)
+        return {long_name, short_name}
+
+    def iter_override_targets(override):
+        try:
+            for target in override._targets():
+                yield target
+        except AssertionError:
+            # Workaround: There is a bug where the private `_targets()`
+            # method fails on some attribute plugs. For example overrides
+            # to the defaultRenderGlobals.endFrame
+            # (Tested in Maya 2020.2)
+            log.debug("Workaround for %s" % override)
+            from maya.app.renderSetup.common.utils import findPlug
+
+            attr = override.attributeName()
+            if isinstance(override, UniqueOverride):
+                node = override.targetNodeName()
+                yield findPlug(node, attr)
+            else:
+                nodes = override.parent().selector().nodes()
+                for node in nodes:
+                    if cmds.attributeQuery(attr, node=node, exists=True):
+                        yield findPlug(node, attr)
+
+    # Get the MPlug for the node.attr
+    sel = om.MSelectionList()
+    sel.add(node_attr)
+    plug = sel.getPlug(0)
+
+    layer = get_rendersetup_layer(layer)
+    if layer == DEFAULT_RENDER_LAYER:
+        # DEFAULT_RENDER_LAYER will never have overrides
+        # since it's the default layer
+        return []
+
+    rs_layer = renderSetup.instance().getRenderLayer(layer)
+    if rs_layer is None:
+        # Renderlayer does not exist
+        return
+
+    # Get any parent or children plugs as we also
+    # want to include them in the attribute match
+    # for overrides
+    parent = plug.parent() if plug.isChild else None
+    parent_index = None
+    if parent:
+        parent_index = get_mplug_children(parent).index(plug)
+
+    children = get_mplug_children(plug)
+
+    # Create lookup for the attribute by both long
+    # and short names
+    attr_names = get_mplug_names(plug)
+    for child in children:
+        attr_names.update(get_mplug_names(child))
+    if parent:
+        attr_names.update(get_mplug_names(parent))
+
+    # Get all overrides of the layer
+    # And find those that are relevant to the attribute
+    plug_overrides = []
+
+    # Iterate over the overrides in reverse so we get the last
+    # overrides first and can "break" whenever an absolute
+    # override is reached
+    layer_overrides = list(utils.getOverridesRecursive(rs_layer))
+    for layer_override in reversed(layer_overrides):
+
+        if skip_disabled and not layer_override.isEnabled():
+            # Ignore disabled overrides
+            continue
+
+        if skip_local_render and layer_override.isLocalRender():
+            continue
+
+        # The targets list can be very large so we'll do
+        # a quick filter by attribute name to detect whether
+        # it matches the attribute name, or its parent or child
+        if layer_override.attributeName() not in attr_names:
+            continue
+
+        override_match = None
+        for override_plug in iter_override_targets(layer_override):
+
+            override_match = None
+            if plug == override_plug:
+                override_match = (EXACT_MATCH, layer_override, None)
+
+            elif parent and override_plug == parent:
+                override_match = (PARENT_MATCH, layer_override, parent_index)
+
+            elif children and override_plug in children:
+                child_index = children.index(override_plug)
+                override_match = (CLIENT_MATCH, layer_override, child_index)
+
+            if override_match:
+                plug_overrides.append(override_match)
+                break
+
+        if (
+                override_match and
+                stop_at_absolute_override and
+                isinstance(layer_override, AbsOverride) and
+                # When the override is only on a child plug then it doesn't
+                # override the entire value so we do not stop at this override
+                not override_match[0] == CLIENT_MATCH
+        ):
+            # If override is absolute override, then BREAK out
+            # of parent loop we don't need to look any further as
+            # this is the absolute override
+            break
+
+    return reversed(plug_overrides)
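A hedged sketch of the 3-tuples this function yields, with made-up override objects, for an attribute that has one relative override on its parent compound and one absolute override on the plug itself:

```python
EXACT_MATCH, PARENT_MATCH, CLIENT_MATCH = 0, 1, 2

overrides = [
    # (Match, Override, Index) - weakest first, the order in which
    # get_attr_in_layer() folds them into the final value
    (PARENT_MATCH, "relOverride1", 0),    # parent compound; plug is child 0
    (EXACT_MATCH, "absOverride1", None),  # directly on the plug
]
```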
+
+
+def get_shader_in_layer(node, layer):
+    """Return the assigned shader in a renderlayer without switching layers.
+
+    This has been developed and tested for Legacy Renderlayers and *not* for
+    Render Setup.
+
+    Note: This will also return the shader for any face assignments, however
+        it will *not* return the components they are assigned to. This could
+        be implemented, but since Maya's renderlayers are famous for breaking
+        with face assignments there has been no need for this function to
+        support that.
+
+    Returns:
+        list: The list of assigned shaders in the given layer.
+
+    """
+
+    def _get_connected_shader(plug):
+        """Return current shader"""
+        return cmds.listConnections(plug,
+                                    source=False,
+                                    destination=True,
+                                    plugs=False,
+                                    connections=False,
+                                    type="shadingEngine") or []
+
+    # We check the instObjGroups (shader connection) for layer overrides.
+    plug = node + ".instObjGroups"
+
+    # Ignore complex query if we're in the layer anyway (optimization)
+    current_layer = cmds.editRenderLayerGlobals(query=True,
+                                                currentRenderLayer=True)
+    if layer == current_layer:
+        return _get_connected_shader(plug)
+
+    connections = cmds.listConnections(plug,
+                                       plugs=True,
+                                       source=False,
+                                       destination=True,
+                                       type="renderLayer") or []
+    connections = list(filter(lambda x: x.endswith(".outPlug"), connections))
+    if not connections:
+        # If no overrides anywhere on the shader, just get the current shader
+        return _get_connected_shader(plug)
+
+    def _get_override(connections, layer):
+        """Return the overridden connection for that layer in connections"""
+        # If there's an override on that layer, return that.
+        for connection in connections:
+            if (connection.startswith(layer + ".outAdjustments") and
+                    connection.endswith(".outPlug")):
+
+                # This is a shader override on that layer so get the shader
+                # connected to .outValue of the .outAdjustment[i]
+                out_adjustment = connection.rsplit(".", 1)[0]
+                connection_attr = out_adjustment + ".outValue"
+                override = cmds.listConnections(connection_attr) or []
+
+                return override
+
+    override_shader = _get_override(connections, layer)
+    if override_shader is not None:
+        return override_shader
+    else:
+        # Get the override for "defaultRenderLayer" (=masterLayer)
+        return _get_override(connections, layer="defaultRenderLayer")
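A hypothetical usage sketch (node and layer names are made up) of the function above, comparing assignments without a layer switch:

```python
# Inside a Maya session with legacy render layers:
shaders_main = get_shader_in_layer("pSphereShape1",
                                   layer="defaultRenderLayer")
shaders_fx = get_shader_in_layer("pSphereShape1", layer="fxLayer")
# e.g. ['initialShadingGroup'] vs ['glowShaderSG'] when fxLayer overrides it
```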
diff --git a/client/ayon_maya/api/menu.py b/client/ayon_maya/api/menu.py
new file mode 100644
index 00000000..153aff07
--- /dev/null
+++ b/client/ayon_maya/api/menu.py
@@ -0,0 +1,286 @@
+import os
+import json
+import logging
+from functools import partial
+
+from qtpy import QtWidgets, QtGui
+
+import maya.utils
+import maya.cmds as cmds
+
+from ayon_core.pipeline import (
+    get_current_folder_path,
+    get_current_task_name,
+    registered_host
+)
+from ayon_core.pipeline.workfile import BuildWorkfile
+from ayon_core.tools.utils import host_tools
+from ayon_maya.api import lib, lib_rendersettings
+from .lib import get_main_window, IS_HEADLESS
+from ..tools import show_look_assigner
+
+from .workfile_template_builder import (
+    create_placeholder,
+    update_placeholder,
+    build_workfile_template,
+    update_workfile_template
+)
+from ayon_core.tools.workfile_template_build import open_template_ui
+from .workfile_template_builder import MayaTemplateBuilder
+
+log = logging.getLogger(__name__)
+
+MENU_NAME = "op_maya_menu"
+
+
+def _get_menu(menu_name=None):
+    """Return the menu instance if it currently exists in Maya"""
+    if menu_name is None:
+        menu_name = MENU_NAME
+
+    widgets = {w.objectName(): w for w in QtWidgets.QApplication.allWidgets()}
+    return widgets.get(menu_name)
+
+
+def get_context_label():
+    return "{}, {}".format(
+        get_current_folder_path(),
+        get_current_task_name()
+    )
+
+
+def install(project_settings):
+    if cmds.about(batch=True):
+        log.info("Skipping AYON menu initialization in batch mode..")
+        return
+
+    def add_menu():
+        pyblish_icon = host_tools.get_pyblish_icon()
+        parent_widget = get_main_window()
+        cmds.menu(
+            MENU_NAME,
+            label=os.environ.get("AYON_MENU_LABEL") or "AYON",
+            tearOff=True,
+            parent="MayaWindow"
+        )
+
+        # Create context menu
+        cmds.menuItem(
+            "currentContext",
+            label=get_context_label(),
+            parent=MENU_NAME,
+            enable=False
+        )
+
+        cmds.setParent("..", menu=True)
+
+        cmds.menuItem(divider=True)
+
+        cmds.menuItem(
+            "Create...",
+            command=lambda *args: host_tools.show_publisher(
+                parent=parent_widget,
+                tab="create"
+            )
+        )
+
+        cmds.menuItem(
+            "Load...",
+            command=lambda *args: host_tools.show_loader(
+                parent=parent_widget,
+                use_context=True
+            )
+        )
+
+        cmds.menuItem(
+            "Publish...",
+            command=lambda *args: host_tools.show_publisher(
+                parent=parent_widget,
+                tab="publish"
+            ),
+            image=pyblish_icon
+        )
+
+        cmds.menuItem(
+            "Manage...",
+            command=lambda *args: host_tools.show_scene_inventory(
+                parent=parent_widget
+            )
+        )
+
+        cmds.menuItem(
+            "Library...",
+            command=lambda *args: host_tools.show_library_loader(
+                parent=parent_widget
+            )
+        )
+
+        cmds.menuItem(divider=True)
+
+        cmds.menuItem(
+            "Work Files...",
+            command=lambda *args: host_tools.show_workfiles(
+                parent=parent_widget
+            ),
+        )
+
+        cmds.menuItem(
+            "Set Frame Range",
+            command=lambda *args: lib.reset_frame_range()
+        )
+
+        cmds.menuItem(
+            "Set Resolution",
+            command=lambda *args: lib.reset_scene_resolution()
+        )
+
+        cmds.menuItem(
+            "Set Colorspace",
+            command=lambda *args: lib.set_colorspace(),
+        )
+
+        cmds.menuItem(
+            "Set Render Settings",
+            command=lambda *args: lib_rendersettings.RenderSettings().set_default_renderer_settings()  # noqa
+        )
+
+        cmds.menuItem(divider=True, parent=MENU_NAME)
+        cmds.menuItem(
+            "Build First Workfile",
+            parent=MENU_NAME,
+            command=lambda *args: BuildWorkfile().process()
+        )
+
+        cmds.menuItem(
+            "Look assigner...",
+            command=lambda *args: show_look_assigner(
+                parent_widget
+            )
+        )
+
+        cmds.menuItem(
+            "Experimental tools...",
+            command=lambda *args: host_tools.show_experimental_tools_dialog(
+                parent_widget
+            )
+        )
+
+        builder_menu = cmds.menuItem(
+            "Template Builder",
+            subMenu=True,
+            tearOff=True,
+            parent=MENU_NAME
+        )
+        cmds.menuItem(
+            "Build Workfile from template",
+            parent=builder_menu,
+            command=build_workfile_template
+        )
+        cmds.menuItem(
+            "Update Workfile from template",
+            parent=builder_menu,
+            command=update_workfile_template
+        )
+        cmds.menuItem(
+            divider=True,
+            parent=builder_menu
+        )
+        cmds.menuItem(
+            "Open Template",
+            parent=builder_menu,
+            command=lambda *args: open_template_ui(
+                MayaTemplateBuilder(registered_host()), get_main_window()
+            ),
+        )
+        cmds.menuItem(
+            "Create Placeholder",
+            parent=builder_menu,
+            command=create_placeholder
+        )
+        cmds.menuItem(
+            "Update Placeholder",
+            parent=builder_menu,
+            command=update_placeholder
+        )
+
+        cmds.setParent(MENU_NAME, menu=True)
+
+    def add_scripts_menu(project_settings):
+        try:
+            import scriptsmenu.launchformaya as launchformaya
+        except ImportError:
+            log.warning(
+                "Skipping studio.menu install, because "
+                "'scriptsmenu' module seems unavailable."
+            )
+            return
+
+        menu_settings = project_settings["maya"]["scriptsmenu"]
+        menu_name = menu_settings["name"]
+        config = menu_settings["definition"]
+
+        if menu_settings.get("definition_type") == "definition_json":
+            data = menu_settings["definition_json"]
+            try:
+                config = json.loads(data)
+            except json.JSONDecodeError as exc:
+                print("Skipping studio menu, error decoding JSON definition.")
+                log.error(exc)
+                return
+
+        if not config:
+            log.warning("Skipping studio menu, no definition found.")
+            return
+
+        # run the launcher for Maya menu
+        studio_menu = launchformaya.main(
+            title=menu_name.title(),
+            objectName=menu_name.title().lower().replace(" ", "_")
+        )
+
+        # apply configuration
+        studio_menu.build_from_configuration(studio_menu, config)
+
+    # Allow time for uninstallation to finish.
+    # We use Maya's executeDeferred instead of QTimer.singleShot
+    # so that it only gets called after Maya UI has initialized too.
+    # This is crucial with Maya 2020+ which initializes without UI
+    # first as a QCoreApplication
+    maya.utils.executeDeferred(add_menu)
+    cmds.evalDeferred(partial(add_scripts_menu, project_settings),
+                      lowestPriority=True)
+
+
+def uninstall():
+    menu = _get_menu()
+    if menu:
+        log.info("Attempting to uninstall ...")
+
+        try:
+            menu.deleteLater()
+            del menu
+        except Exception as e:
+            log.error(e)
+
+
+def popup():
+    """Pop-up the existing menu near the mouse cursor."""
+    menu = _get_menu()
+    cursor = QtGui.QCursor()
+    point = cursor.pos()
+    menu.exec_(point)
+
+
+def update_menu_task_label():
+    """Update the task label in AYON menu to current session"""
+
+    if IS_HEADLESS:
+        return
+
+    object_name = "{}|currentContext".format(MENU_NAME)
+    if not cmds.menuItem(object_name, query=True, exists=True):
+        log.warning("Can't find menuItem: {}".format(object_name))
+        return
+
+    label = get_context_label()
+    cmds.menuItem(object_name, edit=True, label=label)
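The deferred registration used in `install()` above follows a general Maya pattern: postpone UI work until the main window exists. A minimal hedged sketch of that pattern with a stand-in callable:

```python
import maya.utils

def build_menu():
    # Stand-in for add_menu(); safe to touch UI here because this runs
    # once Maya's idle loop (and thus its UI) is up
    print("menu built after UI init")

maya.utils.executeDeferred(build_menu)
```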
import menu, lib +from .workio import ( + open_file, + save_file, + file_extensions, + has_unsaved_changes, + work_root, + current_file +) + +log = logging.getLogger("ayon_maya") + +PLUGINS_DIR = os.path.join(MAYA_ROOT_DIR, "plugins") +PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish") +LOAD_PATH = os.path.join(PLUGINS_DIR, "load") +CREATE_PATH = os.path.join(PLUGINS_DIR, "create") +INVENTORY_PATH = os.path.join(PLUGINS_DIR, "inventory") +WORKFILE_BUILD_PATH = os.path.join(PLUGINS_DIR, "workfile_build") + +AVALON_CONTAINERS = ":AVALON_CONTAINERS" + +# Track whether the workfile tool is about to save +_about_to_save = False + + +class MayaHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost): + name = "maya" + + def __init__(self): + super(MayaHost, self).__init__() + self._op_events = {} + + def install(self): + project_name = get_current_project_name() + project_settings = get_project_settings(project_name) + # process path mapping + dirmap_processor = MayaDirmap("maya", project_name, project_settings) + dirmap_processor.process_dirmap() + + pyblish.api.register_plugin_path(PUBLISH_PATH) + pyblish.api.register_host("mayabatch") + pyblish.api.register_host("mayapy") + pyblish.api.register_host("maya") + + register_loader_plugin_path(LOAD_PATH) + register_creator_plugin_path(CREATE_PATH) + register_inventory_action_path(INVENTORY_PATH) + register_workfile_build_plugin_path(WORKFILE_BUILD_PATH) + + self.log.info("Installing callbacks ... ") + register_event_callback("init", on_init) + + _set_project() + + if lib.IS_HEADLESS: + self.log.info(( + "Running in headless mode, skipping Maya save/open/new" + " callback installation.." + )) + + return + + self._register_callbacks() + + menu.install(project_settings) + + register_event_callback("save", on_save) + register_event_callback("open", on_open) + register_event_callback("new", on_new) + register_event_callback("before.save", on_before_save) + register_event_callback("after.save", on_after_save) + register_event_callback("before.close", on_before_close) + register_event_callback("before.file.open", before_file_open) + register_event_callback("taskChanged", on_task_changed) + register_event_callback("workfile.open.before", before_workfile_open) + register_event_callback("workfile.save.before", before_workfile_save) + register_event_callback( + "workfile.save.before", workfile_save_before_xgen + ) + register_event_callback("workfile.save.after", after_workfile_save) + + def open_workfile(self, filepath): + return open_file(filepath) + + def save_workfile(self, filepath=None): + return save_file(filepath) + + def work_root(self, session): + return work_root(session) + + def get_current_workfile(self): + return current_file() + + def workfile_has_unsaved_changes(self): + return has_unsaved_changes() + + def get_workfile_extensions(self): + return file_extensions() + + def get_containers(self): + return ls() + + @contextlib.contextmanager + def maintained_selection(self): + with lib.maintained_selection(): + yield + + def get_context_data(self): + data = cmds.fileInfo("OpenPypeContext", query=True) + if not data: + return {} + + data = data[0] # Maya seems to return a list + decoded = base64.b64decode(data).decode("utf-8") + return json.loads(decoded) + + def update_context_data(self, data, changes): + json_str = json.dumps(data) + encoded = base64.b64encode(json_str.encode("utf-8")) + return cmds.fileInfo("OpenPypeContext", encoded) + + def _register_callbacks(self): + for handler, event in self._op_events.copy().items(): + if event is None: + 
continue + + try: + OpenMaya.MMessage.removeCallback(event) + self._op_events[handler] = None + except RuntimeError as exc: + self.log.info(exc) + + self._op_events[_on_scene_save] = OpenMaya.MSceneMessage.addCallback( + OpenMaya.MSceneMessage.kBeforeSave, _on_scene_save + ) + + self._op_events[_after_scene_save] = ( + OpenMaya.MSceneMessage.addCallback( + OpenMaya.MSceneMessage.kAfterSave, + _after_scene_save + ) + ) + + self._op_events[_before_scene_save] = ( + OpenMaya.MSceneMessage.addCheckCallback( + OpenMaya.MSceneMessage.kBeforeSaveCheck, + _before_scene_save + ) + ) + + self._op_events[_on_scene_new] = OpenMaya.MSceneMessage.addCallback( + OpenMaya.MSceneMessage.kAfterNew, _on_scene_new + ) + + self._op_events[_on_maya_initialized] = ( + OpenMaya.MSceneMessage.addCallback( + OpenMaya.MSceneMessage.kMayaInitialized, + _on_maya_initialized + ) + ) + + self._op_events[_on_scene_open] = ( + OpenMaya.MSceneMessage.addCallback( + OpenMaya.MSceneMessage.kAfterOpen, + _on_scene_open + ) + ) + + self._op_events[_before_scene_open] = ( + OpenMaya.MSceneMessage.addCallback( + OpenMaya.MSceneMessage.kBeforeOpen, + _before_scene_open + ) + ) + + self._op_events[_before_close_maya] = ( + OpenMaya.MSceneMessage.addCallback( + OpenMaya.MSceneMessage.kMayaExiting, + _before_close_maya + ) + ) + + self.log.info("Installed event handler _on_scene_save..") + self.log.info("Installed event handler _before_scene_save..") + self.log.info("Installed event handler _on_after_save..") + self.log.info("Installed event handler _on_scene_new..") + self.log.info("Installed event handler _on_maya_initialized..") + self.log.info("Installed event handler _on_scene_open..") + self.log.info("Installed event handler _check_lock_file..") + self.log.info("Installed event handler _before_close_maya..") + + +def _set_project(): + """Sets the maya project to the current Session's work directory. + + Returns: + None + + """ + workdir = os.getenv("AYON_WORKDIR") + + try: + os.makedirs(workdir) + except OSError as e: + # An already existing working directory is fine. + if e.errno == errno.EEXIST: + pass + else: + raise + + cmds.workspace(workdir, openWorkspace=True) + + +def _on_maya_initialized(*args): + emit_event("init") + + if cmds.about(batch=True): + log.warning("Running batch mode ...") + return + + # Keep reference to the main Window, once a main window exists. + lib.get_main_window() + + +def _on_scene_new(*args): + emit_event("new") + + +def _after_scene_save(*arg): + emit_event("after.save") + + +def _on_scene_save(*args): + emit_event("save") + + +def _on_scene_open(*args): + emit_event("open") + + +def _before_close_maya(*args): + emit_event("before.close") + + +def _before_scene_open(*args): + emit_event("before.file.open") + + +def _before_scene_save(return_code, client_data): + + # Default to allowing the action. Registered + # callbacks can optionally set this to False + # in order to block the operation. 
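+    # Illustrative sketch (not part of the original changeset): a handler
+    # registered for "before.save" could veto the save by flipping the
+    # passed return code, e.g.:
+    #
+    #     def block_save(event):  # hypothetical handler name
+    #         OpenMaya.MScriptUtil.setBool(event["return_code"], False)
+    #
+    #     register_event_callback("before.save", block_save)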
+ OpenMaya.MScriptUtil.setBool(return_code, True) + + emit_event( + "before.save", + {"return_code": return_code} + ) + + +def _remove_workfile_lock(): + """Remove workfile lock on current file""" + if not handle_workfile_locks(): + return + filepath = current_file() + log.info("Removing lock on current file {}...".format(filepath)) + if filepath: + remove_workfile_lock(filepath) + + +def handle_workfile_locks(): + if lib.IS_HEADLESS: + return False + project_name = get_current_project_name() + return is_workfile_lock_enabled(MayaHost.name, project_name) + + +def uninstall(): + pyblish.api.deregister_plugin_path(PUBLISH_PATH) + pyblish.api.deregister_host("mayabatch") + pyblish.api.deregister_host("mayapy") + pyblish.api.deregister_host("maya") + + deregister_loader_plugin_path(LOAD_PATH) + deregister_creator_plugin_path(CREATE_PATH) + deregister_inventory_action_path(INVENTORY_PATH) + deregister_workfile_build_plugin_path(WORKFILE_BUILD_PATH) + + menu.uninstall() + + +def parse_container(container): + """Return the container node's full container data. + + Args: + container (str): A container node name. + + Returns: + dict: The container schema data for this container node. + + """ + data = lib.read(container) + + # Backwards compatibility pre-schemas for containers + data["schema"] = data.get("schema", "openpype:container-1.0") + + # Append transient data + data["objectName"] = container + + return data + + +def _ls(): + """Yields AYON container node names. + + Used by `ls()` to retrieve the nodes and then query the full container's + data. + + Yields: + str: AYON container node name (objectSet) + + """ + + def _maya_iterate(iterator): + """Helper to iterate a maya iterator""" + while not iterator.isDone(): + yield iterator.thisNode() + iterator.next() + + ids = { + AYON_CONTAINER_ID, + # Backwards compatibility + AVALON_CONTAINER_ID + } + + # Iterate over all 'set' nodes in the scene to detect whether + # they have the ayon container ".id" attribute. + fn_dep = om.MFnDependencyNode() + iterator = om.MItDependencyNodes(om.MFn.kSet) + for mobject in _maya_iterate(iterator): + if mobject.apiTypeStr != "kSet": + # Only match by exact type + continue + + fn_dep.setObject(mobject) + if not fn_dep.hasAttribute("id"): + continue + + plug = fn_dep.findPlug("id", True) + value = plug.asString() + if value in ids: + yield fn_dep.name() + + +def ls(): + """Yields containers from active Maya scene + + This is the host-equivalent of api.ls(), but instead of listing + assets on disk, it lists assets already loaded in Maya; once loaded + they are called 'containers' + + Yields: + dict: container + + """ + container_names = _ls() + for container in sorted(container_names): + yield parse_container(container) + + +def containerise(name, + namespace, + nodes, + context, + loader=None, + suffix="CON"): + """Bundle `nodes` into an assembly and imprint it with metadata + + Containerisation enables a tracking of version, author and origin + for loaded assets. + + Arguments: + name (str): Name of resulting assembly + namespace (str): Namespace under which to host container + nodes (list): Long names of nodes to containerise + context (dict): Asset information + loader (str, optional): Name of loader used to produce this container. + suffix (str, optional): Suffix of container, defaults to `_CON`. 
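+
+    Example:
+        A minimal, hypothetical call from a loader (values are
+        illustrative only)::
+
+            containerise(name="modelMain",
+                         namespace="hero_01",
+                         nodes=cmds.ls(selection=True, long=True),
+                         context=context,  # as passed to `Loader.load()`
+                         loader="ReferenceLoader")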
+ + Returns: + container (str): Name of container assembly + + """ + container = cmds.sets(nodes, name="%s_%s_%s" % (namespace, name, suffix)) + + data = [ + ("schema", "openpype:container-2.0"), + ("id", AVALON_CONTAINER_ID), + ("name", name), + ("namespace", namespace), + ("loader", loader), + ("representation", context["representation"]["id"]), + ] + + for key, value in data: + cmds.addAttr(container, longName=key, dataType="string") + cmds.setAttr(container + "." + key, str(value), type="string") + + main_container = cmds.ls(AVALON_CONTAINERS, type="objectSet") + if not main_container: + main_container = cmds.sets(empty=True, name=AVALON_CONTAINERS) + + # Implement #399: Maya 2019+ hide AVALON_CONTAINERS on creation.. + if cmds.attributeQuery("hiddenInOutliner", + node=main_container, + exists=True): + cmds.setAttr(main_container + ".hiddenInOutliner", True) + else: + main_container = main_container[0] + + cmds.sets(container, addElement=main_container) + + # Implement #399: Maya 2019+ hide containers in outliner + if cmds.attributeQuery("hiddenInOutliner", + node=container, + exists=True): + cmds.setAttr(container + ".hiddenInOutliner", True) + + return container + + +def on_init(): + log.info("Running callback on init..") + + def safe_deferred(fn): + """Execute deferred the function in a try-except""" + + def _fn(): + """safely call in deferred callback""" + try: + fn() + except Exception as exc: + print(exc) + + try: + utils.executeDeferred(_fn) + except Exception as exc: + print(exc) + + # Force load Alembic so referenced alembics + # work correctly on scene open + cmds.loadPlugin("AbcImport", quiet=True) + cmds.loadPlugin("AbcExport", quiet=True) + + # Force load objExport plug-in (requested by artists) + cmds.loadPlugin("objExport", quiet=True) + + if not lib.IS_HEADLESS: + launch_workfiles = os.environ.get("WORKFILES_STARTUP") + if launch_workfiles: + safe_deferred(host_tools.show_workfiles) + + from .customize import ( + override_component_mask_commands, + override_toolbox_ui + ) + safe_deferred(override_component_mask_commands) + safe_deferred(override_toolbox_ui) + + +def on_before_save(): + """Run validation for scene's FPS prior to saving""" + return lib.validate_fps() + + +def on_after_save(): + """Check if there is a lockfile after save""" + check_lock_on_current_file() + + +def check_lock_on_current_file(): + + """Check if there is a user opening the file""" + if not handle_workfile_locks(): + return + log.info("Running callback on checking the lock file...") + + # add the lock file when opening the file + filepath = current_file() + # Skip if current file is 'untitled' + if not filepath: + return + + if is_workfile_locked(filepath): + # add lockfile dialog + workfile_dialog = WorkfileLockDialog(filepath) + if not workfile_dialog.exec_(): + cmds.file(new=True) + return + + create_workfile_lock(filepath) + + +def on_before_close(): + """Delete the lock file after user quitting the Maya Scene""" + log.info("Closing Maya...") + # delete the lock file + filepath = current_file() + if handle_workfile_locks(): + remove_workfile_lock(filepath) + + +def before_file_open(): + """check lock file when the file changed""" + # delete the lock file + _remove_workfile_lock() + + +def on_save(): + """Automatically add IDs to new nodes + + Any transform of a mesh, without an existing ID, is given one + automatically on file save. 
+    """
+    log.info("Running callback on save..")
+    # Remove lockfile if the user jumps from one scene to another
+    _remove_workfile_lock()
+
+    # Generate ids of the current context on nodes in the scene
+    nodes = lib.get_id_required_nodes(referenced_nodes=False,
+                                      existing_ids=False)
+    for node, new_id in lib.generate_ids(nodes):
+        lib.set_id(node, new_id, overwrite=False)
+
+    # We are now starting the actual save directly
+    global _about_to_save
+    _about_to_save = False
+
+
+def on_open():
+    """On scene open let's assume the containers have changed."""
+
+    from ayon_core.tools.utils import SimplePopup
+
+    # Validate FPS after update_task_from_path to
+    # ensure it is using correct FPS for the folder
+    lib.validate_fps()
+    lib.fix_incompatible_containers()
+
+    if any_outdated_containers():
+        log.warning("Scene has outdated content.")
+
+        # Find maya main window
+        parent = lib.get_main_window()
+        if parent is None:
+            log.info("Skipping outdated content pop-up "
+                     "because Maya window can't be found.")
+        else:
+            # Show outdated pop-up
+            def _on_show_inventory():
+                host_tools.show_scene_inventory(parent=parent)
+
+            dialog = SimplePopup(parent=parent)
+            dialog.setWindowTitle("Maya scene has outdated content")
+            dialog.set_message("There are outdated containers in "
+                               "your Maya scene.")
+            dialog.on_clicked.connect(_on_show_inventory)
+            dialog.show()
+
+    # Create lock file for the maya scene
+    check_lock_on_current_file()
+
+
+def on_new():
+    """Set project resolution and fps when creating a new file"""
+    log.info("Running callback on new..")
+    with lib.suspended_refresh():
+        lib.set_context_settings()
+
+    _remove_workfile_lock()
+
+
+def on_task_changed():
+    """Callback on task change: update menu label and Maya workspace"""
+    menu.update_menu_task_label()
+
+    workdir = os.getenv("AYON_WORKDIR")
+    if os.path.exists(workdir):
+        log.info("Updating Maya workspace for task change to %s", workdir)
+        _set_project()
+
+        # Set Maya fileDialog's start-dir to /scenes
+        frule_scene = cmds.workspace(fileRuleEntry="scene")
+        cmds.optionVar(stringValue=("browserLocationmayaBinaryscene",
+                                    workdir + "/" + frule_scene))
+
+    else:
+        log.warning((
+            "Can't set project for new context because path does not exist: {}"
+        ).format(workdir))
+
+    global _about_to_save
+    if not lib.IS_HEADLESS and _about_to_save:
+        # Prompt the user whether to update the context settings
+        lib.prompt_reset_context()
+
+
+def before_workfile_open():
+    if handle_workfile_locks():
+        _remove_workfile_lock()
+
+
+def before_workfile_save(event):
+    project_name = get_current_project_name()
+    if handle_workfile_locks():
+        _remove_workfile_lock()
+    workdir_path = event["workdir_path"]
+    if workdir_path:
+        create_workspace_mel(workdir_path, project_name)
+
+    global _about_to_save
+    _about_to_save = True
+
+
+def workfile_save_before_xgen(event):
+    """Manage Xgen external files when switching context.
+
+    Xgen has various external files that need to be unique and relative to
+    the workfile, so we copy and potentially overwrite these files when
+    switching context.
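+
+    In practice this copies the palettes' `xgFileName`/`xgBaseFile` files
+    and the contents of the first `xgDataPath` entry into the new work
+    directory, then repoints those attributes (see the implementation
+    below).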
+ + Args: + event (Event) - ayon_core/lib/events.py + """ + if not cmds.pluginInfo("xgenToolkit", query=True, loaded=True): + return + + import xgenm + + current_work_dir = os.getenv("AYON_WORKDIR").replace("\\", "/") + expected_work_dir = event.data["workdir_path"].replace("\\", "/") + if current_work_dir == expected_work_dir: + return + + palettes = cmds.ls(type="xgmPalette", long=True) + if not palettes: + return + + transfers = [] + overwrites = [] + attribute_changes = {} + attrs = ["xgFileName", "xgBaseFile"] + for palette in palettes: + sanitized_palette = palette.replace("|", "") + project_path = xgenm.getAttr("xgProjectPath", sanitized_palette) + _, maya_extension = os.path.splitext(event.data["filename"]) + + for attr in attrs: + node_attr = "{}.{}".format(palette, attr) + attr_value = cmds.getAttr(node_attr) + + if not attr_value: + continue + + source = os.path.join(project_path, attr_value) + + attr_value = event.data["filename"].replace( + maya_extension, + "__{}{}".format( + sanitized_palette.replace(":", "__"), + os.path.splitext(attr_value)[1] + ) + ) + target = os.path.join(expected_work_dir, attr_value) + + transfers.append((source, target)) + attribute_changes[node_attr] = attr_value + + relative_path = xgenm.getAttr( + "xgDataPath", sanitized_palette + ).split(os.pathsep)[0] + absolute_path = relative_path.replace("${PROJECT}", project_path) + for root, _, files in os.walk(absolute_path): + for f in files: + source = os.path.join(root, f).replace("\\", "/") + target = source.replace(project_path, expected_work_dir + "/") + transfers.append((source, target)) + if os.path.exists(target): + overwrites.append(target) + + # Ask user about overwriting files. + if overwrites: + log.warning( + "WARNING! Potential loss of data.\n\n" + "Found duplicate Xgen files in new context.\n{}".format( + "\n".join(overwrites) + ) + ) + return + + for source, destination in transfers: + if not os.path.exists(os.path.dirname(destination)): + os.makedirs(os.path.dirname(destination)) + shutil.copy(source, destination) + + for attribute, value in attribute_changes.items(): + cmds.setAttr(attribute, value, type="string") + + +def after_workfile_save(event): + workfile_name = event["filename"] + if ( + handle_workfile_locks() + and workfile_name + and not is_workfile_locked(workfile_name) + ): + create_workfile_lock(workfile_name) + + +class MayaDirmap(HostDirmap): + def on_enable_dirmap(self): + cmds.dirmap(en=True) + + def dirmap_routine(self, source_path, destination_path): + cmds.dirmap(m=(source_path, destination_path)) + cmds.dirmap(m=(destination_path, source_path)) diff --git a/client/ayon_maya/api/plugin.py b/client/ayon_maya/api/plugin.py new file mode 100644 index 00000000..0eb998cb --- /dev/null +++ b/client/ayon_maya/api/plugin.py @@ -0,0 +1,1027 @@ +import json +import os +from abc import ABCMeta + +import qargparse +import six +import ayon_api +from maya import cmds +from maya.app.renderSetup.model import renderSetup + +from ayon_core.lib import BoolDef, Logger +from ayon_core.settings import get_project_settings +from ayon_core.pipeline import ( + AYON_INSTANCE_ID, + AYON_CONTAINER_ID, + AVALON_INSTANCE_ID, + AVALON_CONTAINER_ID, + Anatomy, + + CreatedInstance, + Creator as NewCreator, + AutoCreator, + HiddenCreator, + + CreatorError, + LegacyCreator, + LoaderPlugin, + get_representation_path, + get_current_project_name, +) +from ayon_core.pipeline.load import LoadError +from ayon_core.pipeline.create import get_product_name + +from . 
import lib +from .lib import imprint, read +from .pipeline import containerise + +log = Logger.get_logger() + + +def _get_attr(node, attr, default=None): + """Helper to get attribute which allows attribute to not exist.""" + if not cmds.attributeQuery(attr, node=node, exists=True): + return default + return cmds.getAttr("{}.{}".format(node, attr)) + + +# Backwards compatibility: these functions has been moved to lib. +def get_reference_node(*args, **kwargs): + """Get the reference node from the container members + + Deprecated: + This function was moved and will be removed in 3.16.x. + """ + msg = "Function 'get_reference_node' has been moved." + log.warning(msg) + cmds.warning(msg) + return lib.get_reference_node(*args, **kwargs) + + +def get_reference_node_parents(*args, **kwargs): + """ + Deprecated: + This function was moved and will be removed in 3.16.x. + """ + msg = "Function 'get_reference_node_parents' has been moved." + log.warning(msg) + cmds.warning(msg) + return lib.get_reference_node_parents(*args, **kwargs) + + +class Creator(LegacyCreator): + defaults = ['Main'] + + def process(self): + nodes = list() + + with lib.undo_chunk(): + if (self.options or {}).get("useSelection"): + nodes = cmds.ls(selection=True) + + instance = cmds.sets(nodes, name=self.name) + lib.imprint(instance, self.data) + + return instance + + +@six.add_metaclass(ABCMeta) +class MayaCreatorBase(object): + + @staticmethod + def cache_instance_data(shared_data): + """Cache instances for Creators to shared data. + + Create `maya_cached_instance_data` key when needed in shared data and + fill it with all collected instances from the scene under its + respective creator identifiers. + + If legacy instances are detected in the scene, create + `maya_cached_legacy_instances` there and fill it with + all legacy products under product type as a key. + + Args: + Dict[str, Any]: Shared data. + + """ + if shared_data.get("maya_cached_instance_data") is None: + cache = dict() + cache_legacy = dict() + + for node in cmds.ls(type="objectSet"): + + if _get_attr(node, attr="id") not in { + AYON_INSTANCE_ID, AVALON_INSTANCE_ID + }: + continue + + creator_id = _get_attr(node, attr="creator_identifier") + if creator_id is not None: + # creator instance + cache.setdefault(creator_id, []).append(node) + else: + # legacy instance + family = _get_attr(node, attr="family") + if family is None: + # must be a broken instance + continue + + cache_legacy.setdefault(family, []).append(node) + + shared_data["maya_cached_instance_data"] = cache + shared_data["maya_cached_legacy_instances"] = cache_legacy + return shared_data + + def get_publish_families(self): + """Return families for the instances of this creator. + + Allow a Creator to define multiple families so that a creator can + e.g. specify `usd` and `usdMaya` and another USD creator can also + specify `usd` but apply different extractors like `usdMultiverse`. + + There is no need to override this method if you only have the + 'product_type' required for publish filtering. 
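+
+        Example:
+            A hypothetical USD creator could declare::
+
+                def get_publish_families(self):
+                    return ["usd", "usdMaya"]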
+ + Returns: + list: families for instances of this creator + + """ + return [] + + def imprint_instance_node(self, node, data): + + # We never store the instance_node as value on the node since + # it's the node name itself + data.pop("instance_node", None) + data.pop("instance_id", None) + + # Don't store `families` since it's up to the creator itself + # to define the initial publish families - not a stored attribute of + # `families` + data.pop("families", None) + + # We store creator attributes at the root level and assume they + # will not clash in names with `product`, `task`, etc. and other + # default names. This is just so these attributes in many cases + # are still editable in the maya UI by artists. + # note: pop to move to end of dict to sort attributes last on the node + creator_attributes = data.pop("creator_attributes", {}) + + # We only flatten value types which `imprint` function supports + json_creator_attributes = {} + for key, value in dict(creator_attributes).items(): + if isinstance(value, (list, tuple, dict)): + creator_attributes.pop(key) + json_creator_attributes[key] = value + + # Flatten remaining creator attributes to the node itself + data.update(creator_attributes) + + # We know the "publish_attributes" will be complex data of + # settings per plugins, we'll store this as a flattened json structure + # pop to move to end of dict to sort attributes last on the node + data["publish_attributes"] = json.dumps( + data.pop("publish_attributes", {}) + ) + + # Persist the non-flattened creator attributes (special value types, + # like multiselection EnumDef) + data["creator_attributes"] = json.dumps(json_creator_attributes) + + # Since we flattened the data structure for creator attributes we want + # to correctly detect which flattened attributes should end back in the + # creator attributes when reading the data from the node, so we store + # the relevant keys as a string + data["__creator_attributes_keys"] = ",".join(creator_attributes.keys()) + + # Kill any existing attributes just so we can imprint cleanly again + for attr in data.keys(): + if cmds.attributeQuery(attr, node=node, exists=True): + cmds.deleteAttr("{}.{}".format(node, attr)) + + return imprint(node, data) + + def read_instance_node(self, node): + node_data = read(node) + + # Never care about a cbId attribute on the object set + # being read as 'data' + node_data.pop("cbId", None) + + # Make sure we convert any creator attributes from the json string + creator_attributes = node_data.get("creator_attributes") + if creator_attributes: + node_data["creator_attributes"] = json.loads(creator_attributes) + else: + node_data["creator_attributes"] = {} + + # Move the relevant attributes into "creator_attributes" that + # we flattened originally + creator_attribute_keys = node_data.pop("__creator_attributes_keys", + "").split(",") + for key in creator_attribute_keys: + if key in node_data: + node_data["creator_attributes"][key] = node_data.pop(key) + + # Make sure we convert any publish attributes from the json string + publish_attributes = node_data.get("publish_attributes") + if publish_attributes: + node_data["publish_attributes"] = json.loads(publish_attributes) + + # Explicitly re-parse the node name + node_data["instance_node"] = node + node_data["instance_id"] = node + + # If the creator plug-in specifies + families = self.get_publish_families() + if families: + node_data["families"] = families + + return node_data + + def _default_collect_instances(self): + 
self.cache_instance_data(self.collection_shared_data) + cached_instances = ( + self.collection_shared_data["maya_cached_instance_data"] + ) + for node in cached_instances.get(self.identifier, []): + node_data = self.read_instance_node(node) + + created_instance = CreatedInstance.from_existing(node_data, self) + self._add_instance_to_context(created_instance) + + def _default_update_instances(self, update_list): + for created_inst, _changes in update_list: + data = created_inst.data_to_store() + node = data.get("instance_node") + + self.imprint_instance_node(node, data) + + def _default_remove_instances(self, instances): + """Remove specified instance from the scene. + + This is only removing `id` parameter so instance is no longer + instance, because it might contain valuable data for artist. + + """ + for instance in instances: + node = instance.data.get("instance_node") + if node: + cmds.delete(node) + + self._remove_instance_from_context(instance) + + +@six.add_metaclass(ABCMeta) +class MayaCreator(NewCreator, MayaCreatorBase): + + settings_category = "maya" + + def create(self, product_name, instance_data, pre_create_data): + + members = list() + if pre_create_data.get("use_selection"): + members = cmds.ls(selection=True) + + # Allow a Creator to define multiple families + publish_families = self.get_publish_families() + if publish_families: + families = instance_data.setdefault("families", []) + for family in self.get_publish_families(): + if family not in families: + families.append(family) + + with lib.undo_chunk(): + instance_node = cmds.sets(members, name=product_name) + instance_data["instance_node"] = instance_node + instance = CreatedInstance( + self.product_type, + product_name, + instance_data, + self) + self._add_instance_to_context(instance) + + self.imprint_instance_node(instance_node, + data=instance.data_to_store()) + return instance + + def collect_instances(self): + return self._default_collect_instances() + + def update_instances(self, update_list): + return self._default_update_instances(update_list) + + def remove_instances(self, instances): + return self._default_remove_instances(instances) + + def get_pre_create_attr_defs(self): + return [ + BoolDef("use_selection", + label="Use selection", + default=True) + ] + + +class MayaAutoCreator(AutoCreator, MayaCreatorBase): + """Automatically triggered creator for Maya. + + The plugin is not visible in UI, and 'create' method does not expect + any arguments. + """ + + settings_category = "maya" + + def collect_instances(self): + return self._default_collect_instances() + + def update_instances(self, update_list): + return self._default_update_instances(update_list) + + def remove_instances(self, instances): + return self._default_remove_instances(instances) + + +class MayaHiddenCreator(HiddenCreator, MayaCreatorBase): + """Hidden creator for Maya. + + The plugin is not visible in UI, and it does not have strictly defined + arguments for 'create' method. + """ + + settings_category = "maya" + + def create(self, *args, **kwargs): + return MayaCreator.create(self, *args, **kwargs) + + def collect_instances(self): + return self._default_collect_instances() + + def update_instances(self, update_list): + return self._default_update_instances(update_list) + + def remove_instances(self, instances): + return self._default_remove_instances(instances) + + +def ensure_namespace(namespace): + """Make sure the namespace exists. + + Args: + namespace (str): The preferred namespace name. 
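+
+    Example:
+        Illustrative usage; safe to call whether or not the namespace
+        already exists::
+
+            ensure_namespace("_renderingMain")  # creates it
+            ensure_namespace("_renderingMain")  # returns the existing one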
+ + Returns: + str: The generated or existing namespace + + """ + exists = cmds.namespace(exists=namespace) + if exists: + return namespace + else: + return cmds.namespace(add=namespace) + + +class RenderlayerCreator(NewCreator, MayaCreatorBase): + """Creator which creates an instance per renderlayer in the workfile. + + Create and manages renderlayer product per renderLayer in workfile. + This generates a singleton node in the scene which, if it exists, tells the + Creator to collect Maya rendersetup renderlayers as individual instances. + As such, triggering create doesn't actually create the instance node per + layer but only the node which tells the Creator it may now collect + an instance per renderlayer. + + """ + + # These are required to be overridden in subclass + singleton_node_name = "" + + # These are optional to be overridden in subclass + layer_instance_prefix = None + + def _get_singleton_node(self, return_all=False): + nodes = lib.lsattr("pre_creator_identifier", self.identifier) + if nodes: + return nodes if return_all else nodes[0] + + def create(self, product_name, instance_data, pre_create_data): + # A Renderlayer is never explicitly created using the create method. + # Instead, renderlayers from the scene are collected. Thus "create" + # would only ever be called to say, 'hey, please refresh collect' + self.create_singleton_node() + + # if no render layers are present, create default one with + # asterisk selector + rs = renderSetup.instance() + if not rs.getRenderLayers(): + render_layer = rs.createRenderLayer("Main") + collection = render_layer.createCollection("defaultCollection") + collection.getSelector().setPattern('*') + + # By RenderLayerCreator.create we make it so that the renderlayer + # instances directly appear even though it just collects scene + # renderlayers. This doesn't actually 'create' any scene contents. + self.collect_instances() + + def create_singleton_node(self): + if self._get_singleton_node(): + raise CreatorError("A Render instance already exists - only " + "one can be configured.") + + with lib.undo_chunk(): + node = cmds.sets(empty=True, name=self.singleton_node_name) + lib.imprint(node, data={ + "pre_creator_identifier": self.identifier + }) + + return node + + def collect_instances(self): + + # We only collect if the global render instance exists + if not self._get_singleton_node(): + return + + host_name = self.create_context.host_name + rs = renderSetup.instance() + layers = rs.getRenderLayers() + for layer in layers: + layer_instance_node = self.find_layer_instance_node(layer) + if layer_instance_node: + data = self.read_instance_node(layer_instance_node) + instance = CreatedInstance.from_existing(data, creator=self) + else: + # No existing scene instance node for this layer. Note that + # this instance will not have the `instance_node` data yet + # until it's been saved/persisted at least once. 
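+                # (Illustrative note, not original code: the product name
+                # is resolved below via `get_product_name`, so a layer
+                # named "Main" would typically yield a product like
+                # "renderMain", depending on project name templates.)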
+ project_name = self.create_context.get_current_project_name() + folder_path = self.create_context.get_current_folder_path() + task_name = self.create_context.get_current_task_name() + instance_data = { + "folderPath": folder_path, + "task": task_name, + "variant": layer.name(), + } + folder_entity = ayon_api.get_folder_by_path( + project_name, folder_path + ) + task_entity = ayon_api.get_task_by_name( + project_name, folder_entity["id"], task_name + ) + product_name = self.get_product_name( + project_name, + folder_entity, + task_entity, + layer.name(), + host_name, + ) + + instance = CreatedInstance( + product_type=self.product_type, + product_name=product_name, + data=instance_data, + creator=self + ) + + instance.transient_data["layer"] = layer + self._add_instance_to_context(instance) + + def find_layer_instance_node(self, layer): + connected_sets = cmds.listConnections( + "{}.message".format(layer.name()), + source=False, + destination=True, + type="objectSet" + ) or [] + + for node in connected_sets: + if not cmds.attributeQuery("creator_identifier", + node=node, + exists=True): + continue + + creator_identifier = cmds.getAttr(node + ".creator_identifier") + if creator_identifier == self.identifier: + self.log.info("Found node: {}".format(node)) + return node + + def _create_layer_instance_node(self, layer): + + # We only collect if a CreateRender instance exists + create_render_set = self._get_singleton_node() + if not create_render_set: + raise CreatorError("Creating a renderlayer instance node is not " + "allowed if no 'CreateRender' instance exists") + + namespace = "_{}".format(self.singleton_node_name) + namespace = ensure_namespace(namespace) + + name = "{}:{}".format(namespace, layer.name()) + render_set = cmds.sets(name=name, empty=True) + + # Keep an active link with the renderlayer so we can retrieve it + # later by a physical maya connection instead of relying on the layer + # name + cmds.addAttr(render_set, longName="renderlayer", at="message") + cmds.connectAttr("{}.message".format(layer.name()), + "{}.renderlayer".format(render_set), force=True) + + # Add the set to the 'CreateRender' set. + cmds.sets(render_set, forceElement=create_render_set) + + return render_set + + def update_instances(self, update_list): + # We only generate the persisting layer data into the scene once + # we save with the UI on e.g. validate or publish + for instance, _changes in update_list: + instance_node = instance.data.get("instance_node") + + # Ensure a node exists to persist the data to + if not instance_node: + layer = instance.transient_data["layer"] + instance_node = self._create_layer_instance_node(layer) + instance.data["instance_node"] = instance_node + + self.imprint_instance_node(instance_node, + data=instance.data_to_store()) + + def imprint_instance_node(self, node, data): + # Do not ever try to update the `renderlayer` since it'll try + # to remove the attribute and recreate it but fail to keep it a + # message attribute link. We only ever imprint that on the initial + # node creation. + # TODO: Improve how this is handled + data.pop("renderlayer", None) + data.get("creator_attributes", {}).pop("renderlayer", None) + + return super(RenderlayerCreator, self).imprint_instance_node(node, + data=data) + + def remove_instances(self, instances): + """Remove specified instances from the scene. + + This is only removing `id` parameter so instance is no longer + instance, because it might contain valuable data for artist. 
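+
+        Note: unlike the generic removal described above, this creator
+        deletes the singleton "CreateRender" node (and any per-layer
+        settings nodes), which removes all renderlayer instances at once.
+        See the implementation below.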
+ + """ + # Instead of removing the single instance or renderlayers we instead + # remove the CreateRender node this creator relies on to decide whether + # it should collect anything at all. + nodes = self._get_singleton_node(return_all=True) + if nodes: + cmds.delete(nodes) + + # Remove ALL the instances even if only one gets deleted + for instance in list(self.create_context.instances): + if instance.get("creator_identifier") == self.identifier: + self._remove_instance_from_context(instance) + + # Remove the stored settings per renderlayer too + node = instance.data.get("instance_node") + if node and cmds.objExists(node): + cmds.delete(node) + + def get_product_name( + self, + project_name, + folder_entity, + task_entity, + variant, + host_name=None, + instance=None + ): + if host_name is None: + host_name = self.create_context.host_name + dynamic_data = self.get_dynamic_data( + project_name, + folder_entity, + task_entity, + variant, + host_name, + instance + ) + task_name = task_type = None + if task_entity: + task_name = task_entity["name"] + task_type = task_entity["taskType"] + # creator.product_type != 'render' as expected + return get_product_name( + project_name, + task_name, + task_type, + host_name, + self.layer_instance_prefix or self.product_type, + variant, + dynamic_data=dynamic_data, + project_settings=self.project_settings + ) + + +def get_load_color_for_product_type(product_type, settings=None): + """Get color for product type from settings. + + Args: + product_type (str): Family name. + settings (Optional[dict]): Settings dictionary. + + Returns: + Union[tuple[float, float, float], None]: RGB color. + + """ + if settings is None: + settings = get_project_settings(get_current_project_name()) + + colors = settings["maya"]["load"]["colors"] + color = colors.get(product_type) + if not color: + return None + + if len(color) == 3: + red, green, blue = color + elif len(color) == 4: + red, green, blue, _ = color + else: + raise ValueError("Invalid color definition {}".format(str(color))) + + if isinstance(red, int): + red = red / 255.0 + green = green / 255.0 + blue = blue / 255.0 + return red, green, blue + + +class Loader(LoaderPlugin): + hosts = ["maya"] + + load_settings = {} # defined in settings + + @classmethod + def apply_settings(cls, project_settings): + super(Loader, cls).apply_settings(project_settings) + cls.load_settings = project_settings['maya']['load'] + + def get_custom_namespace_and_group(self, context, options, loader_key): + """Queries Settings to get custom template for namespace and group. + + Group template might be empty >> this forces to not wrap imported items + into separate group. 
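+
+        For example, a namespace template such as "{folder[name]}_##_"
+        (illustrative) is formatted with the data gathered below, and any
+        trailing "#" padding is later made unique per load via
+        `lib.get_custom_namespace`.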
+ + Args: + context (dict) + options (dict): artist modifiable options from dialog + loader_key (str): key to get separate configuration from Settings + ('reference_loader'|'import_loader') + """ + + options["attach_to_root"] = True + custom_naming = self.load_settings[loader_key] + + if not custom_naming["namespace"]: + raise LoadError("No namespace specified in " + "Maya ReferenceLoader settings") + elif not custom_naming["group_name"]: + self.log.debug("No custom group_name, no group will be created.") + options["attach_to_root"] = False + + folder_entity = context["folder"] + product_entity = context["product"] + product_name = product_entity["name"] + product_type = product_entity["productType"] + formatting_data = { + "asset_name": folder_entity["name"], + "asset_type": "asset", + "folder": { + "name": folder_entity["name"], + }, + "subset": product_name, + "product": { + "name": product_name, + "type": product_type, + }, + "family": product_type + } + + custom_namespace = custom_naming["namespace"].format( + **formatting_data + ) + + custom_group_name = custom_naming["group_name"].format( + **formatting_data + ) + + return custom_group_name, custom_namespace, options + + +class ReferenceLoader(Loader): + """A basic ReferenceLoader for Maya + + This will implement the basic behavior for a loader to inherit from that + will containerize the reference and will implement the `remove` and + `update` logic. + + """ + + options = [ + qargparse.Integer( + "count", + label="Count", + default=1, + min=1, + help="How many times to load?" + ), + qargparse.Double3( + "offset", + label="Position Offset", + help="Offset loaded models for easier selection." + ), + qargparse.Boolean( + "attach_to_root", + label="Group imported asset", + default=True, + help="Should a group be created to encapsulate" + " imported representation ?" + ) + ] + + def load( + self, + context, + name=None, + namespace=None, + options=None + ): + path = self.filepath_from_context(context) + assert os.path.exists(path), "%s does not exist." 
% path + + custom_group_name, custom_namespace, options = \ + self.get_custom_namespace_and_group(context, options, + "reference_loader") + + count = options.get("count") or 1 + + loaded_containers = [] + for c in range(0, count): + namespace = lib.get_custom_namespace(custom_namespace) + group_name = "{}:{}".format( + namespace, + custom_group_name + ) + + options['group_name'] = group_name + + # Offset loaded product + if "offset" in options: + offset = [i * c for i in options["offset"]] + options["translate"] = offset + + self.log.info(options) + + self.process_reference( + context=context, + name=name, + namespace=namespace, + options=options + ) + + # Only containerize if any nodes were loaded by the Loader + nodes = self[:] + if not nodes: + return + + ref_node = lib.get_reference_node(nodes, self.log) + container = containerise( + name=name, + namespace=namespace, + nodes=[ref_node], + context=context, + loader=self.__class__.__name__ + ) + loaded_containers.append(container) + self._organize_containers(nodes, container) + c += 1 + + return loaded_containers + + def process_reference(self, context, name, namespace, options): + """To be implemented by subclass""" + raise NotImplementedError("Must be implemented by subclass") + + def update(self, container, context): + from maya import cmds + + from ayon_maya.api.lib import get_container_members + + node = container["objectName"] + + project_name = context["project"]["name"] + repre_entity = context["representation"] + + path = get_representation_path(repre_entity) + + # Get reference node from container members + members = get_container_members(node) + reference_node = lib.get_reference_node(members, self.log) + namespace = cmds.referenceQuery(reference_node, namespace=True) + + file_type = { + "ma": "mayaAscii", + "mb": "mayaBinary", + "abc": "Alembic", + "fbx": "FBX", + "usd": "USD Import" + }.get(repre_entity["name"]) + + assert file_type, "Unsupported representation: %s" % repre_entity + + assert os.path.exists(path), "%s does not exist." % path + + # Need to save alembic settings and reapply, cause referencing resets + # them to incoming data. + alembic_attrs = ["speed", "offset", "cycleType", "time"] + alembic_data = {} + if repre_entity["name"] == "abc": + alembic_nodes = cmds.ls( + "{}:*".format(namespace), type="AlembicNode" + ) + if alembic_nodes: + for attr in alembic_attrs: + node_attr = "{}.{}".format(alembic_nodes[0], attr) + data = { + "input": lib.get_attribute_input(node_attr), + "value": cmds.getAttr(node_attr) + } + + alembic_data[attr] = data + else: + self.log.debug("No alembic nodes found in {}".format(members)) + + try: + path = self.prepare_root_value(path, project_name) + content = cmds.file(path, + loadReference=reference_node, + type=file_type, + returnNewNodes=True) + except RuntimeError as exc: + # When changing a reference to a file that has load errors the + # command will raise an error even if the file is still loaded + # correctly (e.g. when raising errors on Arnold attributes) + # When the file is loaded and has content, we consider it's fine. + if not cmds.referenceQuery(reference_node, isLoaded=True): + raise + + content = cmds.referenceQuery(reference_node, + nodes=True, + dagPath=True) + if not content: + raise + + self.log.warning("Ignoring file read error:\n%s", exc) + + self._organize_containers(content, container["objectName"]) + + # Reapply alembic settings. 
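+        # (Illustration, not original code: `alembic_data` maps each saved
+        # attribute to its pre-update incoming connection and value, e.g.
+        #     {"time": {"input": "time1.outTime", "value": 101.0}}
+        # with hypothetical values; the block below re-connects or re-sets
+        # each of them.)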
+        if repre_entity["name"] == "abc" and alembic_data:
+            alembic_nodes = cmds.ls(
+                "{}:*".format(namespace), type="AlembicNode"
+            )
+            if alembic_nodes:
+                alembic_node = alembic_nodes[0]  # assume single AlembicNode
+                for attr, data in alembic_data.items():
+                    node_attr = "{}.{}".format(alembic_node, attr)
+                    current_input = lib.get_attribute_input(node_attr)
+                    if data["input"]:
+                        if data["input"] != current_input:
+                            cmds.connectAttr(
+                                data["input"], node_attr, force=True
+                            )
+                    else:
+                        if current_input:
+                            cmds.disconnectAttr(current_input, node_attr)
+                    cmds.setAttr(node_attr, data["value"])
+
+        # Fix PLN-40 for older containers created with AYON that had the
+        # `.verticesOnlySet` attribute set to True.
+        if cmds.getAttr("{}.verticesOnlySet".format(node)):
+            self.log.info("Setting %s.verticesOnlySet to False", node)
+            cmds.setAttr("{}.verticesOnlySet".format(node), False)
+
+        # Remove any placeHolderList attribute entries from the set that
+        # are remaining from nodes being removed from the referenced file.
+        members = cmds.sets(node, query=True)
+        invalid = [x for x in members if ".placeHolderList" in x]
+        if invalid:
+            cmds.sets(invalid, remove=node)
+
+        # Update metadata
+        cmds.setAttr("{}.representation".format(node),
+                     repre_entity["id"],
+                     type="string")
+
+        # When an animation or pointcache gets connected to an Xgen
+        # container, the compound attribute "xgenContainers" gets created.
+        # When animation containers get updated we also need to update the
+        # cacheFileName on the Xgen collection.
+        compound_name = "xgenContainers"
+        if cmds.objExists("{}.{}".format(node, compound_name)):
+            import xgenm
+            container_amount = cmds.getAttr(
+                "{}.{}".format(node, compound_name), size=True
+            )
+            # Loop through all compound children
+            for i in range(container_amount):
+                attr = "{}.{}[{}].container".format(node, compound_name, i)
+                objectset = cmds.listConnections(attr)[0]
+                reference_node = cmds.sets(objectset, query=True)[0]
+                palettes = cmds.ls(
+                    cmds.referenceQuery(reference_node, nodes=True),
+                    type="xgmPalette"
+                )
+                for palette in palettes:
+                    for description in xgenm.descriptions(palette):
+                        xgenm.setAttr(
+                            "cacheFileName",
+                            path.replace("\\", "/"),
+                            palette,
+                            description,
+                            "SplinePrimitive"
+                        )
+
+            # Refresh UI and viewport.
+            de = xgenm.xgGlobal.DescriptionEditor
+            de.refresh("Full")
+
+    def remove(self, container):
+        """Remove an existing `container` from Maya scene
+
+        Deprecated; this functionality is replaced by `api.remove()`
+
+        Arguments:
+            container (openpype:container-1.0): Which container
+                to remove from scene.
+
+        """
+        from maya import cmds
+
+        node = container["objectName"]
+
+        # Assume asset has been referenced
+        members = cmds.sets(node, query=True)
+        reference_node = lib.get_reference_node(members, self.log)
+
+        assert reference_node, ("Imported container not supported; "
+                                "container must be referenced.")
+
+        self.log.info("Removing '%s' from Maya.." % container["name"])
+
+        namespace = cmds.referenceQuery(reference_node, namespace=True)
+        fname = cmds.referenceQuery(reference_node, filename=True)
+        cmds.file(fname, removeReference=True)
+
+        try:
+            cmds.delete(node)
+        except ValueError:
+            # Already implicitly deleted by Maya upon removing reference
+            pass
+
+        try:
+            # If container is not automatically cleaned up by Maya
+            # (issue #118)
+            cmds.namespace(removeNamespace=namespace,
+                           deleteNamespaceContent=True)
+        except RuntimeError:
+            pass
+
+    def prepare_root_value(self, file_url, project_name):
+        """Replace root value with env var placeholder.
+ + Use ${AYON_PROJECT_ROOT_WORK} (or any other root) instead of proper + root value when storing referenced url into a workfile. + Useful for remote workflows with SiteSync. + + Args: + file_url (str) + project_name (dict) + Returns: + (str) + """ + settings = get_project_settings(project_name) + use_env_var_as_root = (settings["maya"] + ["maya_dirmap"] + ["use_env_var_as_root"]) + if use_env_var_as_root: + anatomy = Anatomy(project_name) + file_url = anatomy.replace_root_with_env_key(file_url, '${{{}}}') + + return file_url + + @staticmethod + def _organize_containers(nodes, container): + # type: (list, str) -> None + """Put containers in loaded data to correct hierarchy.""" + for node in nodes: + id_attr = "{}.id".format(node) + if not cmds.attributeQuery("id", node=node, exists=True): + continue + if cmds.getAttr(id_attr) not in { + AYON_CONTAINER_ID, AVALON_CONTAINER_ID + }: + cmds.sets(node, forceElement=container) diff --git a/client/ayon_maya/api/render_setup_tools.py b/client/ayon_maya/api/render_setup_tools.py new file mode 100644 index 00000000..9b00b53e --- /dev/null +++ b/client/ayon_maya/api/render_setup_tools.py @@ -0,0 +1,127 @@ +# -*- coding: utf-8 -*- +"""Export stuff in render setup layer context. + +Export Maya nodes from Render Setup layer as if flattened in that layer instead +of exporting the defaultRenderLayer as Maya forces by default + +Credits: Roy Nieterau (BigRoy) / Colorbleed +Modified for use in AYON + +""" + +import os +import contextlib + +from maya import cmds +from maya.app.renderSetup.model import renderSetup + +from .lib import pairwise + + +@contextlib.contextmanager +def allow_export_from_render_setup_layer(): + """Context manager to override Maya settings to allow RS layer export""" + try: + + rs = renderSetup.instance() + + # Exclude Render Setup nodes from the export + rs._setAllRSNodesDoNotWrite(True) + + # Disable Render Setup forcing the switch to master layer + os.environ["MAYA_BATCH_RENDER_EXPORT"] = "1" + + yield + + finally: + # Reset original state + rs._setAllRSNodesDoNotWrite(False) + os.environ.pop("MAYA_BATCH_RENDER_EXPORT", None) + + +def export_in_rs_layer(path, nodes, export=None): + """Export nodes from Render Setup layer. + + When exporting from Render Setup layer Maya by default + forces a switch to the defaultRenderLayer as such making + it impossible to export the contents of a Render Setup + layer. Maya presents this warning message: + # Warning: Exporting Render Setup master layer content # + + This function however avoids the renderlayer switch and + exports from the Render Setup layer as if the edits were + 'flattened' in the master layer. + + It does so by: + - Allowing export from Render Setup Layer + - Enforce Render Setup nodes to NOT be written on export + - Disconnect connections from any `applyOverride` nodes + to flatten the values (so they are written correctly)* + *Connection overrides like Shader Override and Material + Overrides export correctly out of the box since they don't + create an intermediate connection to an 'applyOverride' node. + However, any scalar override (absolute or relative override) + will get input connections in the layer so we'll break those + to 'store' the values on the attribute itself and write value + out instead. + + Args: + path (str): File path to export to. + nodes (list): Maya nodes to export. + export (callable, optional): Callback to be used for exporting. If + not specified, default export to `.ma` will be called. 
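+
+    Example:
+        A minimal sketch (hypothetical paths) using a custom export
+        callback instead of the default `.ma` export::
+
+            from functools import partial
+
+            path = "/tmp/flat_layer.mb"  # hypothetical output path
+            export_in_rs_layer(
+                path,
+                nodes=cmds.ls(selection=True, long=True),
+                export=partial(cmds.file, path, force=True,
+                               typ="mayaBinary", exportSelected=True)
+            )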
+ + Returns: + None + + Raises: + AssertionError: When not in a Render Setup layer an + AssertionError is raised. This command assumes + you are currently in a Render Setup layer. + + """ + rs = renderSetup.instance() + assert rs.getVisibleRenderLayer().name() != "defaultRenderLayer", \ + ("Export in Render Setup layer is only supported when in " + "Render Setup layer") + + # Break connection to any value overrides + history = cmds.listHistory(nodes) or [] + nodes_all = list( + set(cmds.ls(nodes + history, long=True, objectsOnly=True))) + overrides = cmds.listConnections(nodes_all, + source=True, + destination=False, + type="applyOverride", + plugs=True, + connections=True) or [] + for dest, src in pairwise(overrides): + # Even after disconnecting the values + # should be preserved as they were + # Note: animated overrides would be lost for export + cmds.disconnectAttr(src, dest) + + # Export Selected + with allow_export_from_render_setup_layer(): + cmds.select(nodes, noExpand=True) + if export: + export() + else: + cmds.file(path, + force=True, + typ="mayaAscii", + exportSelected=True, + preserveReferences=False, + channels=True, + constraints=True, + expressions=True, + constructionHistory=True) + + if overrides: + # If we have broken override connections then Maya + # is unaware that the Render Setup layer is in an + # invalid state. So let's 'hard reset' the state + # by going to default render layer and switching back + layer = rs.getVisibleRenderLayer() + rs.switchToLayer(None) + rs.switchToLayer(layer) diff --git a/client/ayon_maya/api/setdress.py b/client/ayon_maya/api/setdress.py new file mode 100644 index 00000000..a130b93f --- /dev/null +++ b/client/ayon_maya/api/setdress.py @@ -0,0 +1,606 @@ +import logging +import json +import os + +import contextlib +import copy + +import six +import ayon_api + +from maya import cmds + +from ayon_core.pipeline import ( + schema, + discover_loader_plugins, + loaders_from_representation, + load_container, + update_container, + remove_container, + get_representation_path, + get_current_project_name, +) +from ayon_maya.api.lib import ( + matrix_equals, + unique_namespace, + get_container_transforms, + DEFAULT_MATRIX +) + +log = logging.getLogger("PackageLoader") + + +def to_namespace(node, namespace): + """Return node name as if it's inside the namespace. + + Args: + node (str): Node name + namespace (str): Namespace + + Returns: + str: The node in the namespace. + + """ + namespace_prefix = "|{}:".format(namespace) + node = namespace_prefix.join(node.split("|")) + return node + + +@contextlib.contextmanager +def namespaced(namespace, new=True): + """Work inside namespace during context + + Args: + new (bool): When enabled this will rename the namespace to a unique + namespace if the input namespace already exists. 
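+
+    Example:
+        Illustrative usage; nodes created inside the block end up in the
+        (possibly uniquified) namespace::
+
+            with namespaced("PACKAGE") as ns:
+                cmds.createNode("transform", name="root")
+            # e.g. creates "PACKAGE01:root"; the exact suffix depends on
+            # `unique_namespace`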
+ + Yields: + str: The namespace that is used during the context + + """ + original = cmds.namespaceInfo(cur=True) + if new: + namespace = unique_namespace(namespace) + cmds.namespace(add=namespace) + + try: + cmds.namespace(set=namespace) + yield namespace + finally: + cmds.namespace(set=original) + + +@contextlib.contextmanager +def unlocked(nodes): + + # Get node state by Maya's uuid + nodes = cmds.ls(nodes, long=True) + uuids = cmds.ls(nodes, uuid=True) + states = cmds.lockNode(nodes, query=True, lock=True) + states = {uuid: state for uuid, state in zip(uuids, states)} + originals = {uuid: node for uuid, node in zip(uuids, nodes)} + + try: + cmds.lockNode(nodes, lock=False) + yield + finally: + # Reapply original states + _iteritems = getattr(states, "iteritems", states.items) + for uuid, state in _iteritems(): + nodes_from_id = cmds.ls(uuid, long=True) + if nodes_from_id: + node = nodes_from_id[0] + else: + log.debug("Falling back to node name: %s", node) + node = originals[uuid] + if not cmds.objExists(node): + log.warning("Unable to find: %s", node) + continue + cmds.lockNode(node, lock=state) + + +def load_package(filepath, name, namespace=None): + """Load a package that was gathered elsewhere. + + A package is a group of published instances, possibly with additional data + in a hierarchy. + + """ + + if namespace is None: + # Define a unique namespace for the package + namespace = os.path.basename(filepath).split(".")[0] + unique_namespace(namespace) + assert isinstance(namespace, six.string_types) + + # Load the setdress package data + with open(filepath, "r") as fp: + data = json.load(fp) + + # Load the setdress alembic hierarchy + # We import this into the namespace in which we'll load the package's + # instances into afterwards. + alembic = filepath.replace(".json", ".abc") + hierarchy = cmds.file(alembic, + reference=True, + namespace=namespace, + returnNewNodes=True, + groupReference=True, + groupName="{}:{}".format(namespace, name), + typ="Alembic") + + # Get the top root node (the reference group) + root = "{}:{}".format(namespace, name) + + containers = [] + all_loaders = discover_loader_plugins() + for representation_id, instances in data.items(): + + # Find the compatible loaders + loaders = loaders_from_representation( + all_loaders, representation_id + ) + + for instance in instances: + container = _add(instance=instance, + representation_id=representation_id, + loaders=loaders, + namespace=namespace, + root=root) + containers.append(container) + + # TODO: Do we want to cripple? Or do we want to add a 'parent' parameter? + # Cripple the original AYON containers so they don't show up in the + # manager + # for container in containers: + # cmds.setAttr("%s.id" % container, + # "setdress.container", + # type="string") + + # TODO: Lock all loaded nodes + # This is to ensure the hierarchy remains unaltered by the artists + # for node in nodes: + # cmds.lockNode(node, lock=True) + + return containers + hierarchy + + +def _add(instance, representation_id, loaders, namespace, root="|"): + """Add an item from the package + + Args: + instance (dict): + representation_id (str): + loaders (list): + namespace (str): + + Returns: + str: The created AYON container. + + """ + + # Process within the namespace + with namespaced(namespace, new=False) as namespace: + + # Get the used loader + Loader = next((x for x in loaders if + x.__name__ == instance['loader']), + None) + + if Loader is None: + log.warning("Loader is missing: %s. 
Skipping %s", + instance['loader'], instance) + raise RuntimeError("Loader is missing.") + + container = load_container( + Loader, + representation_id, + namespace=instance['namespace'] + ) + + # Get the root from the loaded container + loaded_root = get_container_transforms({"objectName": container}, + root=True) + + # Apply matrix to root node (if any matrix edits) + matrix = instance.get("matrix", None) + if matrix: + cmds.xform(loaded_root, objectSpace=True, matrix=matrix) + + # Parent into the setdress hierarchy + # Namespace is missing from parent node(s), add namespace + # manually + parent = root + to_namespace(instance["parent"], namespace) + cmds.parent(loaded_root, parent, relative=True) + + return container + + +# Store root nodes based on representation and namespace +def _instances_by_namespace(data): + """Rebuild instance data so we can look it up by namespace. + + Note that the `representation` is added into the instance's + data with a `representation` key. + + Args: + data (dict): scene build data + + Returns: + dict + + """ + result = {} + # Add new assets + for representation_id, instances in data.items(): + + # Ensure we leave the source data unaltered + instances = copy.deepcopy(instances) + for instance in instances: + instance['representation'] = representation_id + result[instance['namespace']] = instance + + return result + + +def get_contained_containers(container): + """Get the AYON containers in this container + + Args: + container (dict): The container dict. + + Returns: + list: A list of member container dictionaries. + + """ + + from .pipeline import parse_container + + # Get AYON containers in this package setdress container + containers = [] + members = cmds.sets(container['objectName'], query=True) + for node in cmds.ls(members, type="objectSet"): + try: + member_container = parse_container(node) + containers.append(member_container) + except schema.ValidationError: + pass + + return containers + + +def update_package_version(container, version): + """ + Update package by version number + + Args: + container (dict): container data of the container node + version (int): the new version number of the package + + Returns: + None + + """ + + # Versioning (from `core.maya.pipeline`) + project_name = get_current_project_name() + repre_id = container["representation"] + current_representation = ayon_api.get_representation_by_id( + project_name, repre_id + ) + + assert current_representation is not None, "This is a bug" + + ( + version_entity, + product_entity, + folder_entity, + project_entity + ) = ayon_api.get_representation_parents(project_name, repre_id) + + if version == -1: + new_version = ayon_api.get_last_version_by_product_id( + project_name, product_entity["id"] + ) + else: + new_version = ayon_api.get_version_by_name( + project_name, version, product_entity["id"] + ) + + if new_version is None: + raise ValueError("Version not found: {}".format(version)) + + # Get the new representation (new file) + new_representation = ayon_api.get_representation_by_name( + project_name, current_representation["name"], new_version["id"] + ) + # TODO there is 'get_representation_context' to get the context which + # could be possible to use here + new_context = { + "project": project_entity, + "folder": folder_entity, + "product": product_entity, + "version": version_entity, + "representation": new_representation, + } + update_package(container, new_context) + + +def update_package(set_container, context): + """Update any matrix changes in the scene based on the new 
data + + Args: + set_container (dict): container data from `ls()` + context (dict): the representation document from the database + + Returns: + None + + """ + + # Load the original package data + project_name = context["project"]["name"] + repre_entity = context["representation"] + current_representation = ayon_api.get_representation_by_id( + project_name, set_container["representation"] + ) + + current_file = get_representation_path(current_representation) + assert current_file.endswith(".json") + with open(current_file, "r") as fp: + current_data = json.load(fp) + + # Load the new package data + new_file = get_representation_path(repre_entity) + assert new_file.endswith(".json") + with open(new_file, "r") as fp: + new_data = json.load(fp) + + # Update scene content + containers = get_contained_containers(set_container) + update_scene(set_container, containers, current_data, new_data, new_file) + + # TODO: This should be handled by the pipeline itself + cmds.setAttr(set_container['objectName'] + ".representation", + context["representation"]["id"], type="string") + + +def update_scene(set_container, containers, current_data, new_data, new_file): + """Updates the hierarchy, assets and their matrix + + Updates the following within the scene: + * Setdress hierarchy alembic + * Matrix + * Parenting + * Representations + + It removes any assets which are not present in the new build data + + Args: + set_container (dict): the setdress container of the scene + containers (list): the list of containers under the setdress container + current_data (dict): the current build data of the setdress + new_data (dict): the new build data of the setdres + + Returns: + processed_containers (list): all new and updated containers + + """ + + set_namespace = set_container['namespace'] + project_name = get_current_project_name() + + # Update the setdress hierarchy alembic + set_root = get_container_transforms(set_container, root=True) + set_hierarchy_root = cmds.listRelatives(set_root, fullPath=True)[0] + set_hierarchy_reference = cmds.referenceQuery(set_hierarchy_root, + referenceNode=True) + new_alembic = new_file.replace(".json", ".abc") + assert os.path.exists(new_alembic), "%s does not exist." % new_alembic + with unlocked(cmds.listRelatives(set_root, ad=True, fullPath=True)): + cmds.file(new_alembic, + loadReference=set_hierarchy_reference, + type="Alembic") + + identity = DEFAULT_MATRIX[:] + + processed_namespaces = set() + processed_containers = list() + + new_lookup = _instances_by_namespace(new_data) + old_lookup = _instances_by_namespace(current_data) + repre_ids = set() + containers_for_repre_compare = [] + for container in containers: + container_ns = container['namespace'] + + # Consider it processed here, even it it fails we want to store that + # the namespace was already available. 
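+        # That way it will not be picked up as a brand new instance by
+        # the "Add new assets" loop at the end of this function.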
+ processed_namespaces.add(container_ns) + processed_containers.append(container['objectName']) + + if container_ns not in new_lookup: + # Remove this container because it's not in the new data + log.warning("Removing content: %s", container_ns) + remove_container(container) + continue + + root = get_container_transforms(container, root=True) + if not root: + log.error("Can't find root for %s", container['objectName']) + continue + + old_instance = old_lookup.get(container_ns, {}) + new_instance = new_lookup[container_ns] + + # Update the matrix + # check matrix against old_data matrix to find local overrides + current_matrix = cmds.xform(root, + query=True, + matrix=True, + objectSpace=True) + + original_matrix = old_instance.get("matrix", identity) + has_matrix_override = not matrix_equals(current_matrix, + original_matrix) + + if has_matrix_override: + log.warning("Matrix override preserved on %s", container_ns) + else: + new_matrix = new_instance.get("matrix", identity) + cmds.xform(root, matrix=new_matrix, objectSpace=True) + + # Update the parenting + if old_instance.get("parent", None) != new_instance["parent"]: + + parent = to_namespace(new_instance['parent'], set_namespace) + if not cmds.objExists(parent): + log.error("Can't find parent %s", parent) + continue + + # Set the new parent + cmds.lockNode(root, lock=False) + root = cmds.parent(root, parent, relative=True) + cmds.lockNode(root, lock=True) + + # Update the representation + representation_current = container['representation'] + representation_old = old_instance['representation'] + representation_new = new_instance['representation'] + has_representation_override = (representation_current != + representation_old) + + if representation_new == representation_current: + continue + + if has_representation_override: + log.warning("Your scene had local representation " + "overrides within the set. New " + "representations not loaded for %s.", + container_ns) + continue + + # We check it against the current 'loader' in the scene instead + # of the original data of the package that was loaded because + # an Artist might have made scene local overrides + if new_instance['loader'] != container['loader']: + log.warning("Loader is switched - local edits will be " + "lost. Removing: %s", + container_ns) + + # Remove this from the "has been processed" list so it's + # considered as new element and added afterwards. + processed_containers.pop() + processed_namespaces.remove(container_ns) + remove_container(container) + continue + + # Check whether the conversion can be done by the Loader. + # They *must* use the same folder, product and Loader for + # `update_container` to make sense. 
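+            # The actual comparison is deferred until after this loop so
+            # that all involved representation entities can be queried
+            # from the server in a single batch.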
+ repre_ids.add(representation_current) + repre_ids.add(representation_new) + + containers_for_repre_compare.append( + (container, representation_current, representation_new) + ) + + repre_entities_by_id = { + repre_entity["id"]: repre_entity + for repre_entity in ayon_api.get_representations( + project_name, representation_ids=repre_ids + ) + } + repre_parents_by_id = ayon_api.get_representations_parents( + project_name, repre_ids + ) + for ( + container, + repre_current_id, + repre_new_id + ) in containers_for_repre_compare: + current_repre = repre_entities_by_id[repre_current_id] + current_parents = repre_parents_by_id[repre_current_id] + new_repre = repre_entities_by_id[repre_new_id] + new_parents = repre_parents_by_id[repre_new_id] + + is_valid = compare_representations( + current_repre, current_parents, new_repre, new_parents + ) + if not is_valid: + log.error("Skipping: %s. See log for details.", + container["namespace"]) + continue + + new_version = new_parents.version["version"] + update_container(container, version=new_version) + + # Add new assets + all_loaders = discover_loader_plugins() + for representation_id, instances in new_data.items(): + + # Find the compatible loaders + loaders = loaders_from_representation( + all_loaders, representation_id + ) + for instance in instances: + + # Already processed in update functionality + if instance['namespace'] in processed_namespaces: + continue + + container = _add(instance=instance, + representation_id=representation_id, + loaders=loaders, + namespace=set_container['namespace'], + root=set_root) + + # Add to the setdress container + cmds.sets(container, + addElement=set_container['objectName']) + + processed_containers.append(container) + + return processed_containers + + +def compare_representations( + current_repre, current_parents, new_repre, new_parents +): + """Check if the old representation given can be updated + + Due to limitations of the `update_container` function we cannot allow + differences in the following data: + + * Representation name (extension) + * Folder id + * Product id + + If any of those data values differs, the function will raise an + RuntimeError + + Args: + current_repre (dict[str, Any]): Current representation entity. + current_parents (RepresentationParents): Current + representation parents. + new_repre (dict[str, Any]): New representation entity. + new_parents (RepresentationParents): New representation parents. + + Returns: + bool: False if the representation is not invalid else True + + """ + if current_repre["name"] != new_repre["name"]: + log.error("Cannot switch extensions") + return False + + # TODO add better validation e.g. 
based on parent ids + if current_parents.folder["id"] != new_parents.folder["id"]: + log.error("Changing folders between updates is not supported.") + return False + + if current_parents.product["id"] != new_parents.product["id"]: + log.error("Changing products between updates is not supported.") + return False + + return True diff --git a/client/ayon_maya/api/workfile_template_builder.py b/client/ayon_maya/api/workfile_template_builder.py new file mode 100644 index 00000000..f4f9a349 --- /dev/null +++ b/client/ayon_maya/api/workfile_template_builder.py @@ -0,0 +1,290 @@ +import json + +from maya import cmds + +from ayon_core.pipeline import ( + registered_host, + get_current_folder_path, + AYON_INSTANCE_ID, + AVALON_INSTANCE_ID, +) +from ayon_core.pipeline.workfile.workfile_template_builder import ( + TemplateAlreadyImported, + AbstractTemplateBuilder, + PlaceholderPlugin, + PlaceholderItem, +) +from ayon_core.tools.workfile_template_build import ( + WorkfileBuildPlaceholderDialog, +) + +from .lib import read, imprint, get_main_window + +PLACEHOLDER_SET = "PLACEHOLDERS_SET" + + +class MayaTemplateBuilder(AbstractTemplateBuilder): + """Concrete implementation of AbstractTemplateBuilder for maya""" + + use_legacy_creators = True + + def import_template(self, path): + """Import template into current scene. + Block if a template is already loaded. + + Args: + path (str): A path to current template (usually given by + get_template_preset implementation) + + Returns: + bool: Whether the template was successfully imported or not + """ + + if cmds.objExists(PLACEHOLDER_SET): + raise TemplateAlreadyImported(( + "Build template already loaded\n" + "Clean scene if needed (File > New Scene)" + )) + + cmds.sets(name=PLACEHOLDER_SET, empty=True) + new_nodes = cmds.file( + path, + i=True, + returnNewNodes=True, + preserveReferences=True, + loadReferenceDepth="all", + ) + + # make default cameras non-renderable + default_cameras = [cam for cam in cmds.ls(cameras=True) + if cmds.camera(cam, query=True, startupCamera=True)] + for cam in default_cameras: + if not cmds.attributeQuery("renderable", node=cam, exists=True): + self.log.debug( + "Camera {} has no attribute 'renderable'".format(cam) + ) + continue + cmds.setAttr("{}.renderable".format(cam), 0) + + cmds.setAttr(PLACEHOLDER_SET + ".hiddenInOutliner", True) + + imported_sets = cmds.ls(new_nodes, set=True) + if not imported_sets: + return True + + # update imported sets information + folder_path = get_current_folder_path() + for node in imported_sets: + if not cmds.attributeQuery("id", node=node, exists=True): + continue + if cmds.getAttr("{}.id".format(node)) not in { + AYON_INSTANCE_ID, AVALON_INSTANCE_ID + }: + continue + if not cmds.attributeQuery("folderPath", node=node, exists=True): + continue + + cmds.setAttr( + "{}.folderPath".format(node), folder_path, type="string") + + return True + + +class MayaPlaceholderPlugin(PlaceholderPlugin): + """Base Placeholder Plugin for Maya with one unified cache. + + Creates a locator as placeholder node, which during populate provide + all of its attributes defined on the locator's transform in + `placeholder.data` and where `placeholder.scene_identifier` is the + full path to the node. 
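+    Example:
+        A minimal sketch of a concrete plug-in; the identifier, label
+        and populate logic below are illustrative only::
+
+            class MyPlaceholderPlugin(MayaPlaceholderPlugin):
+                identifier = "studio.placeholder.example"
+                label = "Example Placeholder"
+
+                def populate_placeholder(self, placeholder):
+                    # 'placeholder.data' holds the imprinted attributes,
+                    # 'placeholder.scene_identifier' the full node path.
+                    print(placeholder.data, placeholder.scene_identifier)
+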
+ + Inherited classes must still implement `populate_placeholder` + + """ + + use_selection_as_parent = True + item_class = PlaceholderItem + + def _create_placeholder_name(self, placeholder_data): + return self.identifier.replace(".", "_") + + def _collect_scene_placeholders(self): + nodes_by_identifier = self.builder.get_shared_populate_data( + "placeholder_nodes" + ) + if nodes_by_identifier is None: + # Cache placeholder data to shared data + nodes = cmds.ls("*.plugin_identifier", long=True, objectsOnly=True) + + nodes_by_identifier = {} + for node in nodes: + identifier = cmds.getAttr("{}.plugin_identifier".format(node)) + nodes_by_identifier.setdefault(identifier, []).append(node) + + # Set the cache + self.builder.set_shared_populate_data( + "placeholder_nodes", nodes_by_identifier + ) + + return nodes_by_identifier + + def create_placeholder(self, placeholder_data): + + parent = None + if self.use_selection_as_parent: + selection = cmds.ls(selection=True) + if len(selection) > 1: + raise ValueError( + "More than one node is selected. " + "Please select only one to define the parent." + ) + parent = selection[0] if selection else None + + placeholder_data["plugin_identifier"] = self.identifier + placeholder_name = self._create_placeholder_name(placeholder_data) + + placeholder = cmds.spaceLocator(name=placeholder_name)[0] + if parent: + placeholder = cmds.parent(placeholder, selection[0])[0] + + self.imprint(placeholder, placeholder_data) + + def update_placeholder(self, placeholder_item, placeholder_data): + node_name = placeholder_item.scene_identifier + + changed_values = {} + for key, value in placeholder_data.items(): + if value != placeholder_item.data.get(key): + changed_values[key] = value + + # Delete attributes to ensure we imprint new data with correct type + for key in changed_values.keys(): + placeholder_item.data[key] = value + if cmds.attributeQuery(key, node=node_name, exists=True): + attribute = "{}.{}".format(node_name, key) + cmds.deleteAttr(attribute) + + self.imprint(node_name, changed_values) + + def collect_placeholders(self): + placeholders = [] + nodes_by_identifier = self._collect_scene_placeholders() + for node in nodes_by_identifier.get(self.identifier, []): + # TODO do data validations and maybe upgrades if they are invalid + placeholder_data = self.read(node) + placeholders.append( + self.item_class(scene_identifier=node, + data=placeholder_data, + plugin=self) + ) + + return placeholders + + def post_placeholder_process(self, placeholder, failed): + """Cleanup placeholder after load of its corresponding representations. + + Hide placeholder, add them to placeholder set. + Used only by PlaceholderCreateMixin and PlaceholderLoadMixin + + Args: + placeholder (PlaceholderItem): Item which was just used to load + representation. + failed (bool): Loading of representation failed. + """ + # Hide placeholder and add them to placeholder set + node = placeholder.scene_identifier + + # If we just populate the placeholders from current scene, the + # placeholder set will not be created so account for that. + if not cmds.objExists(PLACEHOLDER_SET): + cmds.sets(name=PLACEHOLDER_SET, empty=True) + + cmds.sets(node, addElement=PLACEHOLDER_SET) + cmds.hide(node) + cmds.setAttr("{}.hiddenInOutliner".format(node), True) + + def delete_placeholder(self, placeholder): + """Remove placeholder if building was successful + + Used only by PlaceholderCreateMixin and PlaceholderLoadMixin. 
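+
+        Args:
+            placeholder (PlaceholderItem): The placeholder item whose
+                scene node will be deleted.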
+ """ + node = placeholder.scene_identifier + + # To avoid that deleting a placeholder node will have Maya delete + # any objectSets the node was a member of we will first remove it + # from any sets it was a member of. This way the `PLACEHOLDERS_SET` + # will survive long enough + sets = cmds.listSets(o=node) or [] + for object_set in sets: + cmds.sets(node, remove=object_set) + + cmds.delete(node) + + def imprint(self, node, data): + """Imprint call for placeholder node""" + + # Complicated data that can't be represented as flat maya attributes + # we write to json strings, e.g. multiselection EnumDef + for key, value in data.items(): + if isinstance(value, (list, tuple, dict)): + data[key] = "JSON::{}".format(json.dumps(value)) + + imprint(node, data) + + def read(self, node): + """Read call for placeholder node""" + + data = read(node) + + # Complicated data that can't be represented as flat maya attributes + # we read from json strings, e.g. multiselection EnumDef + for key, value in data.items(): + if isinstance(value, str) and value.startswith("JSON::"): + value = value[len("JSON::"):] # strip of JSON:: prefix + data[key] = json.loads(value) + + return data + + +def build_workfile_template(*args): + builder = MayaTemplateBuilder(registered_host()) + builder.build_template() + + +def update_workfile_template(*args): + builder = MayaTemplateBuilder(registered_host()) + builder.rebuild_template() + + +def create_placeholder(*args): + host = registered_host() + builder = MayaTemplateBuilder(host) + window = WorkfileBuildPlaceholderDialog(host, builder, + parent=get_main_window()) + window.show() + + +def update_placeholder(*args): + host = registered_host() + builder = MayaTemplateBuilder(host) + placeholder_items_by_id = { + placeholder_item.scene_identifier: placeholder_item + for placeholder_item in builder.get_placeholders() + } + placeholder_items = [] + for node_name in cmds.ls(selection=True, long=True): + if node_name in placeholder_items_by_id: + placeholder_items.append(placeholder_items_by_id[node_name]) + + # TODO show UI at least + if len(placeholder_items) == 0: + raise ValueError("No node selected") + + if len(placeholder_items) > 1: + raise ValueError("Too many selected nodes") + + placeholder_item = placeholder_items[0] + window = WorkfileBuildPlaceholderDialog(host, builder, + parent=get_main_window()) + window.set_update_mode(placeholder_item) + window.exec_() diff --git a/client/ayon_maya/api/workio.py b/client/ayon_maya/api/workio.py new file mode 100644 index 00000000..ff6c11eb --- /dev/null +++ b/client/ayon_maya/api/workio.py @@ -0,0 +1,66 @@ +"""Host API required Work Files tool""" +import os +from maya import cmds + + +def file_extensions(): + return [".ma", ".mb"] + + +def has_unsaved_changes(): + return cmds.file(query=True, modified=True) + + +def save_file(filepath): + cmds.file(rename=filepath) + ext = os.path.splitext(filepath)[1] + if ext == ".mb": + file_type = "mayaBinary" + else: + file_type = "mayaAscii" + cmds.file(save=True, type=file_type) + + +def open_file(filepath): + return cmds.file(filepath, open=True, force=True) + + +def current_file(): + + current_filepath = cmds.file(query=True, sceneName=True) + if not current_filepath: + return None + + return current_filepath + + +def work_root(session): + work_dir = session["AYON_WORKDIR"] + scene_dir = None + + # Query scene file rule from workspace.mel if it exists in WORKDIR + # We are parsing the workspace.mel manually as opposed to temporarily + # setting the Workspace in Maya in a context 
manager since Maya had a + # tendency to crash on frequently changing the workspace when this + # function was called many times as one scrolled through Work Files assets. + workspace_mel = os.path.join(work_dir, "workspace.mel") + if os.path.exists(workspace_mel): + scene_rule = 'workspace -fr "scene" ' + # We need to use builtins as `open` is overridden by the workio API + open_file = __builtins__["open"] + with open_file(workspace_mel, "r") as f: + for line in f: + if line.strip().startswith(scene_rule): + # remainder == "rule"; + remainder = line[len(scene_rule):] + # scene_dir == rule + scene_dir = remainder.split('"')[1] + else: + # We can't query a workspace that does not exist + # so we return similar to what we do in other hosts. + scene_dir = session.get("AVALON_SCENEDIR") + + if scene_dir: + return os.path.join(work_dir, scene_dir) + else: + return work_dir diff --git a/client/ayon_maya/api/yeti.py b/client/ayon_maya/api/yeti.py new file mode 100644 index 00000000..1526c3a2 --- /dev/null +++ b/client/ayon_maya/api/yeti.py @@ -0,0 +1,101 @@ +from typing import List + +from maya import cmds + + +def get_yeti_user_variables(yeti_shape_node: str) -> List[str]: + """Get user defined yeti user variables for a `pgYetiMaya` shape node. + + Arguments: + yeti_shape_node (str): The `pgYetiMaya` shape node. + + Returns: + list: Attribute names (for a vector attribute it only lists the top + parent attribute, not the attribute per axis) + """ + + attrs = cmds.listAttr(yeti_shape_node, + userDefined=True, + string=("yetiVariableV_*", + "yetiVariableF_*")) or [] + valid_attrs = [] + for attr in attrs: + attr_type = cmds.attributeQuery(attr, node=yeti_shape_node, + attributeType=True) + if attr.startswith("yetiVariableV_") and attr_type == "double3": + # vector + valid_attrs.append(attr) + elif attr.startswith("yetiVariableF_") and attr_type == "double": + valid_attrs.append(attr) + + return valid_attrs + + +def create_yeti_variable(yeti_shape_node: str, + attr_name: str, + value=None, + force_value: bool = False) -> bool: + """Get user defined yeti user variables for a `pgYetiMaya` shape node. + + Arguments: + yeti_shape_node (str): The `pgYetiMaya` shape node. + attr_name (str): The fully qualified yeti variable name, e.g. + "yetiVariableF_myfloat" or "yetiVariableV_myvector" + value (object): The value to set (must match the type of the attribute) + When value is None it will ignored and not be set. + force_value (bool): Whether to set the value if the attribute already + exists or not. + + Returns: + bool: Whether the attribute value was set or not. 
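+
+        Example:
+            A usage sketch; the shape and attribute names below are
+            illustrative only::
+
+                # Create a float user variable with an initial value
+                create_yeti_variable("pgYetiMayaShape1",
+                                     "yetiVariableF_length",
+                                     value=2.0)
+                # Create a vector (double3) user variable
+                create_yeti_variable("pgYetiMayaShape1",
+                                     "yetiVariableV_tint",
+                                     value=(1.0, 0.5, 0.5))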
+ + """ + exists = cmds.attributeQuery(attr_name, node=yeti_shape_node, exists=True) + if not exists: + if attr_name.startswith("yetiVariableV_"): + _create_vector_yeti_user_variable(yeti_shape_node, attr_name) + if attr_name.startswith("yetiVariableF_"): + _create_float_yeti_user_variable(yeti_shape_node, attr_name) + + if value is not None and (not exists or force_value): + plug = "{}.{}".format(yeti_shape_node, attr_name) + if ( + isinstance(value, (list, tuple)) + and attr_name.startswith("yetiVariableV_") + ): + cmds.setAttr(plug, *value, type="double3") + else: + cmds.setAttr(plug, value) + + return True + return False + + +def _create_vector_yeti_user_variable(yeti_shape_node: str, attr_name: str): + if not attr_name.startswith("yetiVariableV_"): + raise ValueError("Must start with yetiVariableV_") + cmds.addAttr(yeti_shape_node, + longName=attr_name, + attributeType="double3", + cachedInternally=True, + keyable=True) + for axis in "XYZ": + cmds.addAttr(yeti_shape_node, + longName="{}{}".format(attr_name, axis), + attributeType="double", + parent=attr_name, + cachedInternally=True, + keyable=True) + + +def _create_float_yeti_user_variable(yeti_node: str, attr_name: str): + if not attr_name.startswith("yetiVariableF_"): + raise ValueError("Must start with yetiVariableF_") + + cmds.addAttr(yeti_node, + longName=attr_name, + attributeType="double", + cachedInternally=True, + softMinValue=0, + softMaxValue=100, + keyable=True) diff --git a/client/ayon_maya/hooks/pre_auto_load_plugins.py b/client/ayon_maya/hooks/pre_auto_load_plugins.py new file mode 100644 index 00000000..45785ac3 --- /dev/null +++ b/client/ayon_maya/hooks/pre_auto_load_plugins.py @@ -0,0 +1,30 @@ +from ayon_applications import PreLaunchHook, LaunchTypes + + +class MayaPreAutoLoadPlugins(PreLaunchHook): + """Define -noAutoloadPlugins command flag.""" + + # Before AddLastWorkfileToLaunchArgs + order = 9 + app_groups = {"maya"} + launch_types = {LaunchTypes.local} + + def execute(self): + + # Ignore if there's no last workfile to start. + if not self.data.get("start_last_workfile"): + return + + maya_settings = self.data["project_settings"]["maya"] + enabled = maya_settings["explicit_plugins_loading"]["enabled"] + if enabled: + # Force disable the `AddLastWorkfileToLaunchArgs`. + self.data.pop("start_last_workfile") + + # Force post initialization so our dedicated plug-in load can run + # prior to Maya opening a scene file. + key = "AYON_OPEN_WORKFILE_POST_INITIALIZATION" + self.launch_context.env[key] = "1" + + self.log.debug("Explicit plugins loading.") + self.launch_context.launch_args.append("-noAutoloadPlugins") diff --git a/client/ayon_maya/hooks/pre_copy_mel.py b/client/ayon_maya/hooks/pre_copy_mel.py new file mode 100644 index 00000000..c3268b09 --- /dev/null +++ b/client/ayon_maya/hooks/pre_copy_mel.py @@ -0,0 +1,23 @@ +from ayon_applications import PreLaunchHook, LaunchTypes +from ayon_maya.lib import create_workspace_mel + + +class PreCopyMel(PreLaunchHook): + """Copy workspace.mel to workdir. + + Hook `GlobalHostDataHook` must be executed before this hook. 
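+
+    The workspace.mel is only written out when it does not exist in the
+    work directory yet (see `ayon_maya.lib.create_workspace_mel`).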
+ """ + app_groups = {"maya", "mayapy"} + launch_types = {LaunchTypes.local} + + def execute(self): + project_entity = self.data["project_entity"] + workdir = self.launch_context.env.get("AYON_WORKDIR") + if not workdir: + self.log.warning("BUG: Workdir is not filled.") + return + + project_settings = self.data["project_settings"] + create_workspace_mel( + workdir, project_entity["name"], project_settings + ) diff --git a/client/ayon_maya/hooks/pre_open_workfile_post_initialization.py b/client/ayon_maya/hooks/pre_open_workfile_post_initialization.py new file mode 100644 index 00000000..a54f17c6 --- /dev/null +++ b/client/ayon_maya/hooks/pre_open_workfile_post_initialization.py @@ -0,0 +1,26 @@ +from ayon_applications import PreLaunchHook, LaunchTypes + + +class MayaPreOpenWorkfilePostInitialization(PreLaunchHook): + """Define whether open last workfile should run post initialize.""" + + # Before AddLastWorkfileToLaunchArgs. + order = 9 + app_groups = {"maya"} + launch_types = {LaunchTypes.local} + + def execute(self): + + # Ignore if there's no last workfile to start. + if not self.data.get("start_last_workfile"): + return + + maya_settings = self.data["project_settings"]["maya"] + enabled = maya_settings["open_workfile_post_initialization"] + if enabled: + # Force disable the `AddLastWorkfileToLaunchArgs`. + self.data.pop("start_last_workfile") + + self.log.debug("Opening workfile post initialization.") + key = "AYON_OPEN_WORKFILE_POST_INITIALIZATION" + self.launch_context.env[key] = "1" diff --git a/client/ayon_maya/lib.py b/client/ayon_maya/lib.py new file mode 100644 index 00000000..6fa8dfdc --- /dev/null +++ b/client/ayon_maya/lib.py @@ -0,0 +1,25 @@ +import os +from ayon_core.settings import get_project_settings +from ayon_core.lib import Logger + + +def create_workspace_mel(workdir, project_name, project_settings=None): + dst_filepath = os.path.join(workdir, "workspace.mel") + if os.path.exists(dst_filepath): + return + + if not os.path.exists(workdir): + os.makedirs(workdir) + + if not project_settings: + project_settings = get_project_settings(project_name) + mel_script = project_settings["maya"].get("mel_workspace") + + # Skip if mel script in settings is empty + if not mel_script: + log = Logger.get_logger("create_workspace_mel") + log.debug("File 'workspace.mel' not created. Settings value is empty.") + return + + with open(dst_filepath, "w") as mel_file: + mel_file.write(mel_script) diff --git a/client/ayon_maya/plugins/__init__.py b/client/ayon_maya/plugins/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/client/ayon_maya/plugins/create/convert_legacy.py b/client/ayon_maya/plugins/create/convert_legacy.py new file mode 100644 index 00000000..8616413b --- /dev/null +++ b/client/ayon_maya/plugins/create/convert_legacy.py @@ -0,0 +1,190 @@ +import ayon_api + +from ayon_core.pipeline.create.creator_plugins import ProductConvertorPlugin +from ayon_maya.api import plugin +from ayon_maya.api.lib import read + +from maya import cmds +from maya.app.renderSetup.model import renderSetup + + +class MayaLegacyConvertor(ProductConvertorPlugin, + plugin.MayaCreatorBase): + """Find and convert any legacy products in the scene. + + This Converter will find all legacy products in the scene and will + transform them to the current system. Since the old products doesn't + retain any information about their original creators, the only mapping + we can do is based on their families. 
+ + Its limitation is that you can have multiple creators creating product + of the same type and there is no way to handle it. This code should + nevertheless cover all creators that came with AYON. + + """ + identifier = "io.openpype.creators.maya.legacy" + + # Cases where the identifier or new product type doesn't correspond to the + # original family on the legacy instances + product_type_mapping = { + "rendering": "io.openpype.creators.maya.renderlayer", + } + + def find_instances(self): + + self.cache_instance_data(self.collection_shared_data) + legacy = self.collection_shared_data.get( + "maya_cached_legacy_instances" + ) + if not legacy: + return + + self.add_convertor_item("Convert legacy instances") + + def convert(self): + self.remove_convertor_item() + + # We can't use the collected shared data cache here + # we re-query it here directly to convert all found. + cache = {} + self.cache_instance_data(cache) + legacy = cache.get("maya_cached_legacy_instances") + if not legacy: + return + + # From all current new style manual creators find the mapping + # from product type to identifier + product_type_to_id = {} + for identifier, creator in self.create_context.creators.items(): + product_type = getattr(creator, "product_type", None) + if not product_type: + continue + + if product_type in product_type_to_id: + # We have a clash of product type -> identifier. Multiple + # new style creators use the same product type + self.log.warning( + "Clash on product type->identifier: {}".format(identifier) + ) + product_type_to_id[product_type] = identifier + + product_type_to_id.update(self.product_type_mapping) + + # We also embed the current 'task' into the instance since legacy + # instances didn't store that data on the instances. The old style + # logic was thus to be live to the current task to begin with. + data = dict() + data["task"] = self.create_context.get_current_task_name() + for product_type, instance_nodes in legacy.items(): + if product_type not in product_type_to_id: + self.log.warning(( + "Unable to convert legacy instance with family '{}'" + " because there is no matching new creator" + ).format(product_type)) + continue + + creator_id = product_type_to_id[product_type] + creator = self.create_context.creators[creator_id] + data["creator_identifier"] = creator_id + + if isinstance(creator, plugin.RenderlayerCreator): + self._convert_per_renderlayer(instance_nodes, data, creator) + else: + self._convert_regular(instance_nodes, data) + + def _convert_regular(self, instance_nodes, data): + # We only imprint the creator identifier for it to identify + # as the new style creator + for instance_node in instance_nodes: + self.imprint_instance_node(instance_node, + data=data.copy()) + + def _convert_per_renderlayer(self, instance_nodes, data, creator): + # Split the instance into an instance per layer + rs = renderSetup.instance() + layers = rs.getRenderLayers() + if not layers: + self.log.error( + "Can't convert legacy renderlayer instance because no existing" + " renderSetup layers exist in the scene." 
+ ) + return + + creator_attribute_names = { + attr_def.key for attr_def in creator.get_instance_attr_defs() + } + + for instance_node in instance_nodes: + + # Ensure we have the new style singleton node generated + # TODO: Make function public + singleton_node = creator._get_singleton_node() + if singleton_node: + self.log.error( + "Can't convert legacy renderlayer instance '{}' because" + " new style instance '{}' already exists".format( + instance_node, + singleton_node + ) + ) + continue + + creator.create_singleton_node() + + # We are creating new nodes to replace the original instance + # Copy the attributes of the original instance to the new node + original_data = read(instance_node) + + # The product type gets converted to the new product type (this + # is due to "rendering" being converted to "renderlayer") + original_data["productType"] = creator.product_type + + # recreate product name as without it would be + # `renderingMain` vs correct `renderMain` + project_name = self.create_context.get_current_project_name() + folder_entities = list(ayon_api.get_folders( + project_name, folder_names=[original_data["asset"]] + )) + if not folder_entities: + cmds.delete(instance_node) + continue + folder_entity = folder_entities[0] + task_entity = ayon_api.get_task_by_name( + project_name, folder_entity["id"], data["task"] + ) + + product_name = creator.get_product_name( + project_name, + folder_entity, + task_entity, + original_data["variant"], + ) + original_data["productName"] = product_name + + # Convert to creator attributes when relevant + creator_attributes = {} + for key in list(original_data.keys()): + # Iterate in order of the original attributes to preserve order + # in the output creator attributes + if key in creator_attribute_names: + creator_attributes[key] = original_data.pop(key) + original_data["creator_attributes"] = creator_attributes + + # For layer in maya layers + for layer in layers: + layer_instance_node = creator.find_layer_instance_node(layer) + if not layer_instance_node: + # TODO: Make function public + layer_instance_node = creator._create_layer_instance_node( + layer + ) + + # Transfer the main attributes of the original instance + layer_data = original_data.copy() + layer_data.update(data) + + self.imprint_instance_node(layer_instance_node, + data=layer_data) + + # Delete the legacy instance node + cmds.delete(instance_node) diff --git a/client/ayon_maya/plugins/create/create_animation_pointcache.py b/client/ayon_maya/plugins/create/create_animation_pointcache.py new file mode 100644 index 00000000..ea4cdb57 --- /dev/null +++ b/client/ayon_maya/plugins/create/create_animation_pointcache.py @@ -0,0 +1,138 @@ +from maya import cmds + +from ayon_maya.api import lib, plugin + +from ayon_core.lib import ( + BoolDef, + NumberDef, +) + + +def _get_animation_attr_defs(cls): + """Get Animation generic definitions.""" + defs = lib.collect_animation_defs() + defs.extend( + [ + BoolDef("farm", label="Submit to Farm"), + NumberDef("priority", label="Farm job Priority", default=50), + BoolDef("refresh", label="Refresh viewport during export"), + BoolDef( + "includeParentHierarchy", + label="Include Parent Hierarchy", + tooltip=( + "Whether to include parent hierarchy of nodes in the " + "publish instance." + ) + ), + BoolDef( + "includeUserDefinedAttributes", + label="Include User Defined Attributes", + tooltip=( + "Whether to include all custom maya attributes found " + "on nodes as attributes in the Alembic data." 
+ ) + ), + ] + ) + + return defs + + +def convert_legacy_alembic_creator_attributes(node_data, class_name): + """This is a legacy transfer of creator attributes to publish attributes + for ExtractAlembic/ExtractAnimation plugin. + """ + publish_attributes = node_data["publish_attributes"] + + if class_name in publish_attributes: + return node_data + + attributes = [ + "attr", + "attrPrefix", + "visibleOnly", + "writeColorSets", + "writeFaceSets", + "writeNormals", + "renderableOnly", + "visibleOnly", + "worldSpace", + "renderableOnly" + ] + plugin_attributes = {} + for attr in attributes: + if attr not in node_data["creator_attributes"]: + continue + value = node_data["creator_attributes"].pop(attr) + + plugin_attributes[attr] = value + + publish_attributes[class_name] = plugin_attributes + + return node_data + + +class CreateAnimation(plugin.MayaHiddenCreator): + """Animation output for character rigs + + We hide the animation creator from the UI since the creation of it is + automated upon loading a rig. There's an inventory action to recreate it + for loaded rigs if by chance someone deleted the animation instance. + """ + + identifier = "io.openpype.creators.maya.animation" + name = "animationDefault" + label = "Animation" + product_type = "animation" + icon = "male" + + write_color_sets = False + write_face_sets = False + include_parent_hierarchy = False + include_user_defined_attributes = False + + def read_instance_node(self, node): + node_data = super(CreateAnimation, self).read_instance_node(node) + node_data = convert_legacy_alembic_creator_attributes( + node_data, "ExtractAnimation" + ) + return node_data + + def get_instance_attr_defs(self): + defs = super(CreateAnimation, self).get_instance_attr_defs() + defs += _get_animation_attr_defs(self) + return defs + + +class CreatePointCache(plugin.MayaCreator): + """Alembic pointcache for animated data""" + + identifier = "io.openpype.creators.maya.pointcache" + label = "Pointcache" + product_type = "pointcache" + icon = "gears" + write_color_sets = False + write_face_sets = False + include_user_defined_attributes = False + + def read_instance_node(self, node): + node_data = super(CreatePointCache, self).read_instance_node(node) + node_data = convert_legacy_alembic_creator_attributes( + node_data, "ExtractAlembic" + ) + return node_data + + def get_instance_attr_defs(self): + defs = super(CreatePointCache, self).get_instance_attr_defs() + defs += _get_animation_attr_defs(self) + return defs + + def create(self, product_name, instance_data, pre_create_data): + instance = super(CreatePointCache, self).create( + product_name, instance_data, pre_create_data + ) + instance_node = instance.get("instance_node") + + # For Arnold standin proxy + proxy_set = cmds.sets(name=instance_node + "_proxy_SET", empty=True) + cmds.sets(proxy_set, forceElement=instance_node) diff --git a/client/ayon_maya/plugins/create/create_arnold_scene_source.py b/client/ayon_maya/plugins/create/create_arnold_scene_source.py new file mode 100644 index 00000000..8ae27596 --- /dev/null +++ b/client/ayon_maya/plugins/create/create_arnold_scene_source.py @@ -0,0 +1,112 @@ +from maya import cmds + +from ayon_maya.api import ( + lib, + plugin +) +from ayon_core.lib import ( + NumberDef, + BoolDef +) + + +class CreateArnoldSceneSource(plugin.MayaCreator): + """Arnold Scene Source""" + + identifier = "io.openpype.creators.maya.ass" + label = "Arnold Scene Source" + product_type = "ass" + icon = "cube" + settings_name = "CreateAss" + + expandProcedurals = False + motionBlur = 
True + motionBlurKeys = 2 + motionBlurLength = 0.5 + maskOptions = False + maskCamera = False + maskLight = False + maskShape = False + maskShader = False + maskOverride = False + maskDriver = False + maskFilter = False + maskColor_manager = False + maskOperator = False + + def get_instance_attr_defs(self): + + defs = lib.collect_animation_defs() + + defs.extend([ + BoolDef("expandProcedural", + label="Expand Procedural", + default=self.expandProcedurals), + BoolDef("motionBlur", + label="Motion Blur", + default=self.motionBlur), + NumberDef("motionBlurKeys", + label="Motion Blur Keys", + decimals=0, + default=self.motionBlurKeys), + NumberDef("motionBlurLength", + label="Motion Blur Length", + decimals=3, + default=self.motionBlurLength), + + # Masks + BoolDef("maskOptions", + label="Export Options", + default=self.maskOptions), + BoolDef("maskCamera", + label="Export Cameras", + default=self.maskCamera), + BoolDef("maskLight", + label="Export Lights", + default=self.maskLight), + BoolDef("maskShape", + label="Export Shapes", + default=self.maskShape), + BoolDef("maskShader", + label="Export Shaders", + default=self.maskShader), + BoolDef("maskOverride", + label="Export Override Nodes", + default=self.maskOverride), + BoolDef("maskDriver", + label="Export Drivers", + default=self.maskDriver), + BoolDef("maskFilter", + label="Export Filters", + default=self.maskFilter), + BoolDef("maskOperator", + label="Export Operators", + default=self.maskOperator), + BoolDef("maskColor_manager", + label="Export Color Managers", + default=self.maskColor_manager), + ]) + + return defs + + +class CreateArnoldSceneSourceProxy(CreateArnoldSceneSource): + """Arnold Scene Source Proxy + + This product type facilitates working with proxy geometry in the viewport. + """ + + identifier = "io.openpype.creators.maya.assproxy" + label = "Arnold Scene Source Proxy" + product_type = "assProxy" + icon = "cube" + + def create(self, product_name, instance_data, pre_create_data): + instance = super(CreateArnoldSceneSource, self).create( + product_name, instance_data, pre_create_data + ) + + instance_node = instance.get("instance_node") + + proxy = cmds.sets(name=instance_node + "_proxy_SET", empty=True) + cmds.sets([proxy], forceElement=instance_node) diff --git a/client/ayon_maya/plugins/create/create_assembly.py b/client/ayon_maya/plugins/create/create_assembly.py new file mode 100644 index 00000000..dff04f05 --- /dev/null +++ b/client/ayon_maya/plugins/create/create_assembly.py @@ -0,0 +1,10 @@ +from ayon_maya.api import plugin + + +class CreateAssembly(plugin.MayaCreator): + """A grouped package of loaded content""" + + identifier = "io.openpype.creators.maya.assembly" + label = "Assembly" + product_type = "assembly" + icon = "cubes" diff --git a/client/ayon_maya/plugins/create/create_camera.py b/client/ayon_maya/plugins/create/create_camera.py new file mode 100644 index 00000000..393176f5 --- /dev/null +++ b/client/ayon_maya/plugins/create/create_camera.py @@ -0,0 +1,36 @@ +from ayon_maya.api import ( + lib, + plugin +) +from ayon_core.lib import BoolDef + + +class CreateCamera(plugin.MayaCreator): + """Single baked camera""" + + identifier = "io.openpype.creators.maya.camera" + label = "Camera" + product_type = "camera" + icon = "video-camera" + + def get_instance_attr_defs(self): + + defs = lib.collect_animation_defs() + + defs.extend([ + BoolDef("bakeToWorldSpace", + label="Bake to World-Space", + tooltip="Bake to World-Space", + default=True), + ]) + + return defs + + +class CreateCameraRig(plugin.MayaCreator): 
+ """Complex hierarchy with camera.""" + + identifier = "io.openpype.creators.maya.camerarig" + label = "Camera Rig" + product_type = "camerarig" + icon = "video-camera" diff --git a/client/ayon_maya/plugins/create/create_layout.py b/client/ayon_maya/plugins/create/create_layout.py new file mode 100644 index 00000000..1d9bc2c1 --- /dev/null +++ b/client/ayon_maya/plugins/create/create_layout.py @@ -0,0 +1,21 @@ +from ayon_maya.api import plugin +from ayon_core.lib import BoolDef + + +class CreateLayout(plugin.MayaCreator): + """A grouped package of loaded content""" + + identifier = "io.openpype.creators.maya.layout" + label = "Layout" + product_type = "layout" + icon = "cubes" + + def get_instance_attr_defs(self): + + return [ + BoolDef("groupLoadedAssets", + label="Group Loaded Assets", + tooltip="Enable this when you want to publish group of " + "loaded asset", + default=False) + ] diff --git a/client/ayon_maya/plugins/create/create_look.py b/client/ayon_maya/plugins/create/create_look.py new file mode 100644 index 00000000..1f90d186 --- /dev/null +++ b/client/ayon_maya/plugins/create/create_look.py @@ -0,0 +1,47 @@ +from ayon_maya.api import ( + plugin, + lib +) +from ayon_core.lib import ( + BoolDef, + TextDef +) + + +class CreateLook(plugin.MayaCreator): + """Shader connections defining shape look""" + + identifier = "io.openpype.creators.maya.look" + label = "Look" + product_type = "look" + icon = "paint-brush" + + make_tx = True + rs_tex = False + + def get_instance_attr_defs(self): + + return [ + # TODO: This value should actually get set on create! + TextDef("renderLayer", + # TODO: Bug: Hidden attribute's label is still shown in UI? + hidden=True, + default=lib.get_current_renderlayer(), + label="Renderlayer", + tooltip="Renderlayer to extract the look from"), + BoolDef("maketx", + label="MakeTX", + tooltip="Whether to generate .tx files for your textures", + default=self.make_tx), + BoolDef("rstex", + label="Convert textures to .rstex", + tooltip="Whether to generate Redshift .rstex files for " + "your textures", + default=self.rs_tex) + ] + + def get_pre_create_attr_defs(self): + # Show same attributes on create but include use selection + defs = super(CreateLook, self).get_pre_create_attr_defs() + defs.extend(self.get_instance_attr_defs()) + return defs diff --git a/client/ayon_maya/plugins/create/create_matchmove.py b/client/ayon_maya/plugins/create/create_matchmove.py new file mode 100644 index 00000000..9cb2a3dd --- /dev/null +++ b/client/ayon_maya/plugins/create/create_matchmove.py @@ -0,0 +1,32 @@ +from ayon_maya.api import ( + lib, + plugin +) +from ayon_core.lib import BoolDef + + +class CreateMatchmove(plugin.MayaCreator): + """Instance for more complex setup of cameras. + + Might contain multiple cameras, geometries etc. 
+ + It is expected to be extracted into .abc or .ma + """ + + identifier = "io.openpype.creators.maya.matchmove" + label = "Matchmove" + product_type = "matchmove" + icon = "video-camera" + + def get_instance_attr_defs(self): + + defs = lib.collect_animation_defs() + + defs.extend([ + BoolDef("bakeToWorldSpace", + label="Bake Cameras to World-Space", + tooltip="Bake Cameras to World-Space", + default=True), + ]) + + return defs diff --git a/client/ayon_maya/plugins/create/create_maya_usd.py b/client/ayon_maya/plugins/create/create_maya_usd.py new file mode 100644 index 00000000..19b55384 --- /dev/null +++ b/client/ayon_maya/plugins/create/create_maya_usd.py @@ -0,0 +1,102 @@ +from ayon_maya.api import plugin, lib +from ayon_core.lib import ( + BoolDef, + EnumDef, + TextDef +) + +from maya import cmds + + +class CreateMayaUsd(plugin.MayaCreator): + """Create Maya USD Export""" + + identifier = "io.openpype.creators.maya.mayausd" + label = "Maya USD" + product_type = "usd" + icon = "cubes" + description = "Create Maya USD Export" + + cache = {} + + def get_publish_families(self): + return ["usd", "mayaUsd"] + + def get_instance_attr_defs(self): + + if "jobContextItems" not in self.cache: + # Query once instead of per instance + job_context_items = {} + try: + cmds.loadPlugin("mayaUsdPlugin", quiet=True) + job_context_items = { + cmds.mayaUSDListJobContexts(jobContext=name): name + for name in cmds.mayaUSDListJobContexts(export=True) or [] + } + except RuntimeError: + # Likely `mayaUsdPlugin` plug-in not available + self.log.warning("Unable to retrieve available job " + "contexts for `mayaUsdPlugin` exports") + + if not job_context_items: + # enumdef multiselection may not be empty + job_context_items = [""] + + self.cache["jobContextItems"] = job_context_items + + defs = lib.collect_animation_defs() + defs.extend([ + EnumDef("defaultUSDFormat", + label="File format", + items={ + "usdc": "Binary", + "usda": "ASCII" + }, + default="usdc"), + BoolDef("stripNamespaces", + label="Strip Namespaces", + tooltip=( + "Remove namespaces during export. By default, " + "namespaces are exported to the USD file in the " + "following format: nameSpaceExample_pPlatonic1" + ), + default=True), + BoolDef("mergeTransformAndShape", + label="Merge Transform and Shape", + tooltip=( + "Combine Maya transform and shape into a single USD" + "prim that has transform and geometry, for all" + " \"geometric primitives\" (gprims).\n" + "This results in smaller and faster scenes. Gprims " + "will be \"unpacked\" back into transform and shape " + "nodes when imported into Maya from USD." + ), + default=True), + BoolDef("includeUserDefinedAttributes", + label="Include User Defined Attributes", + tooltip=( + "Whether to include all custom maya attributes found " + "on nodes as metadata (userProperties) in USD." + ), + default=False), + TextDef("attr", + label="Custom Attributes", + default="", + placeholder="attr1, attr2"), + TextDef("attrPrefix", + label="Custom Attributes Prefix", + default="", + placeholder="prefix1, prefix2"), + EnumDef("jobContext", + label="Job Context", + items=self.cache["jobContextItems"], + tooltip=( + "Specifies an additional export context to handle.\n" + "These usually contain extra schemas, primitives,\n" + "and materials that are to be exported for a " + "specific\ntask, a target renderer for example." 
+ ), + multiselection=True), + ]) + + return defs diff --git a/client/ayon_maya/plugins/create/create_mayascene.py b/client/ayon_maya/plugins/create/create_mayascene.py new file mode 100644 index 00000000..9913efc0 --- /dev/null +++ b/client/ayon_maya/plugins/create/create_mayascene.py @@ -0,0 +1,11 @@ +from ayon_maya.api import plugin + + +class CreateMayaScene(plugin.MayaCreator): + """Raw Maya Scene file export""" + + identifier = "io.openpype.creators.maya.mayascene" + name = "mayaScene" + label = "Maya Scene" + product_type = "mayaScene" + icon = "file-archive-o" diff --git a/client/ayon_maya/plugins/create/create_model.py b/client/ayon_maya/plugins/create/create_model.py new file mode 100644 index 00000000..87696c58 --- /dev/null +++ b/client/ayon_maya/plugins/create/create_model.py @@ -0,0 +1,43 @@ +from ayon_maya.api import plugin +from ayon_core.lib import ( + BoolDef, + TextDef +) + + +class CreateModel(plugin.MayaCreator): + """Polygonal static geometry""" + + identifier = "io.openpype.creators.maya.model" + label = "Model" + product_type = "model" + icon = "cube" + default_variants = ["Main", "Proxy", "_MD", "_HD", "_LD"] + + write_color_sets = False + write_face_sets = False + + def get_instance_attr_defs(self): + + return [ + BoolDef("writeColorSets", + label="Write vertex colors", + tooltip="Write vertex colors with the geometry", + default=self.write_color_sets), + BoolDef("writeFaceSets", + label="Write face sets", + tooltip="Write face sets with the geometry", + default=self.write_face_sets), + BoolDef("includeParentHierarchy", + label="Include Parent Hierarchy", + tooltip="Whether to include parent hierarchy of nodes in " + "the publish instance", + default=False), + TextDef("attr", + label="Custom Attributes", + default="", + placeholder="attr1, attr2"), + TextDef("attrPrefix", + label="Custom Attributes Prefix", + placeholder="prefix1, prefix2") + ] diff --git a/client/ayon_maya/plugins/create/create_multishot_layout.py b/client/ayon_maya/plugins/create/create_multishot_layout.py new file mode 100644 index 00000000..52298231 --- /dev/null +++ b/client/ayon_maya/plugins/create/create_multishot_layout.py @@ -0,0 +1,223 @@ +import collections + +from ayon_api import ( + get_folder_by_name, + get_folder_by_path, + get_folders, + get_tasks, +) +from maya import cmds # noqa: F401 + +from ayon_maya.api import plugin +from ayon_core.lib import BoolDef, EnumDef, TextDef +from ayon_core.pipeline import ( + Creator, + get_current_folder_path, + get_current_project_name, +) +from ayon_core.pipeline.create import CreatorError + + +class CreateMultishotLayout(plugin.MayaCreator): + """Create a multi-shot layout in the Maya scene. + + This creator will create a Camera Sequencer in the Maya scene based on + the shots found under the specified folder. The shots will be added to + the sequencer in the order of their clipIn and clipOut values. For each + shot a Layout will be created. + + """ + identifier = "io.openpype.creators.maya.multishotlayout" + label = "Multi-shot Layout" + product_type = "layout" + icon = "project-diagram" + + def get_pre_create_attr_defs(self): + # Present artist with a list of parents of the current context + # to choose from. This will be used to get the shots under the + # selected folder to create the Camera Sequencer. + + """ + Todo: `get_folder_by_name` should be switched to `get_folder_by_path` + once the fork to pure AYON is done. 
+ + Warning: this will not work for projects where the folder name + is not unique across the project until the switch mentioned + above is done. + """ + + project_name = get_current_project_name() + folder_path = get_current_folder_path() + if "/" in folder_path: + current_folder = get_folder_by_path(project_name, folder_path) + else: + current_folder = get_folder_by_name( + project_name, folder_name=folder_path + ) + + current_path_parts = current_folder["path"].split("/") + + # populate the list with parents of the current folder + # this will create menu items like: + # [ + # { + # "value": "", + # "label": "project (shots directly under the project)" + # }, { + # "value": "shots/shot_01", "label": "shot_01 (current)" + # }, { + # "value": "shots", "label": "shots" + # } + # ] + + # add the project as the first item + items_with_label = [ + { + "label": f"{self.project_name} " + "(shots directly under the project)", + "value": "" + } + ] + + # go through the current folder path and add each part to the list, + # but mark the current folder. + for part_idx, part in enumerate(current_path_parts): + label = part + if label == current_folder["name"]: + label = f"{label} (current)" + + value = "/".join(current_path_parts[:part_idx + 1]) + + items_with_label.append({"label": label, "value": value}) + + return [ + EnumDef("shotParent", + default=current_folder["name"], + label="Shot Parent Folder", + items=items_with_label, + ), + BoolDef("groupLoadedAssets", + label="Group Loaded Assets", + tooltip="Enable this when you want to publish group of " + "loaded asset", + default=False), + TextDef("taskName", + label="Associated Task Name", + tooltip=("Task name to be associated " + "with the created Layout"), + default="layout"), + ] + + def create(self, product_name, instance_data, pre_create_data): + shots = list( + self.get_related_shots(folder_path=pre_create_data["shotParent"]) + ) + if not shots: + # There are no shot folders under the specified folder. + # We are raising an error here but in the future we might + # want to create a new shot folders by publishing the layouts + # and shot defined in the sequencer. Sort of editorial publish + # in side of Maya. + raise CreatorError(( + "No shots found under the specified " + f"folder: {pre_create_data['shotParent']}.")) + + # Get layout creator + layout_creator_id = "io.openpype.creators.maya.layout" + layout_creator: Creator = self.create_context.creators.get( + layout_creator_id) + if not layout_creator: + raise CreatorError( + f"Creator {layout_creator_id} not found.") + + folder_ids = {s["id"] for s in shots} + folder_entities = get_folders(self.project_name, folder_ids) + task_entities = get_tasks( + self.project_name, folder_ids=folder_ids + ) + task_entities_by_folder_id = collections.defaultdict(dict) + for task_entity in task_entities: + folder_id = task_entity["folderId"] + task_name = task_entity["name"] + task_entities_by_folder_id[folder_id][task_name] = task_entity + + folder_entities_by_id = {fe["id"]: fe for fe in folder_entities} + for shot in shots: + # we are setting shot name to be displayed in the sequencer to + # `shot name (shot label)` if the label is set, otherwise just + # `shot name`. So far, labels are used only when the name is set + # with characters that are not allowed in the shot name. 
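+            # Skip shots that are deactivated in the project database.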
+ if not shot["active"]: + continue + + # get task for shot + folder_id = shot["id"] + folder_entity = folder_entities_by_id[folder_id] + task_entities = task_entities_by_folder_id[folder_id] + + layout_task_name = None + layout_task_entity = None + if pre_create_data["taskName"] in task_entities: + layout_task_name = pre_create_data["taskName"] + layout_task_entity = task_entities[layout_task_name] + + shot_name = f"{shot['name']}%s" % ( + f" ({shot['label']})" if shot["label"] else "") + cmds.shot(sequenceStartTime=shot["attrib"]["clipIn"], + sequenceEndTime=shot["attrib"]["clipOut"], + shotName=shot_name) + + # Create layout instance by the layout creator + + instance_data = { + "folderPath": shot["path"], + "variant": layout_creator.get_default_variant() + } + if layout_task_name: + instance_data["task"] = layout_task_name + + layout_creator.create( + product_name=layout_creator.get_product_name( + self.project_name, + folder_entity, + layout_task_entity, + layout_creator.get_default_variant(), + ), + instance_data=instance_data, + pre_create_data={ + "groupLoadedAssets": pre_create_data["groupLoadedAssets"] + } + ) + + def get_related_shots(self, folder_path: str): + """Get all shots related to the current folder. + + Get all folders of type Shot under specified folder. + + Args: + folder_path (str): Path of the folder. + + Returns: + list: List of dicts with folder data. + + """ + # if folder_path is None, project is selected as a root + # and its name is used as a parent id + parent_id = self.project_name + if folder_path: + current_folder = get_folder_by_path( + project_name=self.project_name, + folder_path=folder_path, + ) + parent_id = current_folder["id"] + + # get all child folders of the current one + return get_folders( + project_name=self.project_name, + parent_ids=[parent_id], + fields=[ + "attrib.clipIn", "attrib.clipOut", + "attrib.frameStart", "attrib.frameEnd", + "name", "label", "path", "folderType", "id" + ] + ) diff --git a/client/ayon_maya/plugins/create/create_multiverse_look.py b/client/ayon_maya/plugins/create/create_multiverse_look.py new file mode 100644 index 00000000..f2dcb771 --- /dev/null +++ b/client/ayon_maya/plugins/create/create_multiverse_look.py @@ -0,0 +1,27 @@ +from ayon_maya.api import plugin +from ayon_core.lib import ( + BoolDef, + EnumDef +) + + +class CreateMultiverseLook(plugin.MayaCreator): + """Create Multiverse Look""" + + identifier = "io.openpype.creators.maya.mvlook" + label = "Multiverse Look" + product_type = "mvLook" + icon = "cubes" + + def get_instance_attr_defs(self): + + return [ + EnumDef("fileFormat", + label="File Format", + tooltip="USD export file format", + items=["usda", "usd"], + default="usda"), + BoolDef("publishMipMap", + label="Publish MipMap", + default=True), + ] diff --git a/client/ayon_maya/plugins/create/create_multiverse_usd.py b/client/ayon_maya/plugins/create/create_multiverse_usd.py new file mode 100644 index 00000000..bdcea4cd --- /dev/null +++ b/client/ayon_maya/plugins/create/create_multiverse_usd.py @@ -0,0 +1,139 @@ +from ayon_maya.api import plugin, lib +from ayon_core.lib import ( + BoolDef, + NumberDef, + TextDef, + EnumDef +) + + +class CreateMultiverseUsd(plugin.MayaCreator): + """Create Multiverse USD Asset""" + + identifier = "io.openpype.creators.maya.mvusdasset" + label = "Multiverse USD Asset" + product_type = "usd" + icon = "cubes" + description = "Create Multiverse USD Asset" + + def get_publish_families(self): + return ["usd", "mvUsd"] + + def get_instance_attr_defs(self): + + defs = 
lib.collect_animation_defs(fps=True) + defs.extend([ + EnumDef("fileFormat", + label="File format", + items=["usd", "usda", "usdz"], + default="usd"), + BoolDef("stripNamespaces", + label="Strip Namespaces", + default=True), + BoolDef("mergeTransformAndShape", + label="Merge Transform and Shape", + default=False), + BoolDef("writeAncestors", + label="Write Ancestors", + default=True), + BoolDef("flattenParentXforms", + label="Flatten Parent Xforms", + default=False), + BoolDef("writeSparseOverrides", + label="Write Sparse Overrides", + default=False), + BoolDef("useMetaPrimPath", + label="Use Meta Prim Path", + default=False), + TextDef("customRootPath", + label="Custom Root Path", + default=''), + TextDef("customAttributes", + label="Custom Attributes", + tooltip="Comma-separated list of attribute names", + default=''), + TextDef("nodeTypesToIgnore", + label="Node Types to Ignore", + tooltip="Comma-separated list of node types to be ignored", + default=''), + BoolDef("writeMeshes", + label="Write Meshes", + default=True), + BoolDef("writeCurves", + label="Write Curves", + default=True), + BoolDef("writeParticles", + label="Write Particles", + default=True), + BoolDef("writeCameras", + label="Write Cameras", + default=False), + BoolDef("writeLights", + label="Write Lights", + default=False), + BoolDef("writeJoints", + label="Write Joints", + default=False), + BoolDef("writeCollections", + label="Write Collections", + default=False), + BoolDef("writePositions", + label="Write Positions", + default=True), + BoolDef("writeNormals", + label="Write Normals", + default=True), + BoolDef("writeUVs", + label="Write UVs", + default=True), + BoolDef("writeColorSets", + label="Write Color Sets", + default=False), + BoolDef("writeTangents", + label="Write Tangents", + default=False), + BoolDef("writeRefPositions", + label="Write Ref Positions", + default=True), + BoolDef("writeBlendShapes", + label="Write BlendShapes", + default=False), + BoolDef("writeDisplayColor", + label="Write Display Color", + default=True), + BoolDef("writeSkinWeights", + label="Write Skin Weights", + default=False), + BoolDef("writeMaterialAssignment", + label="Write Material Assignment", + default=False), + BoolDef("writeHardwareShader", + label="Write Hardware Shader", + default=False), + BoolDef("writeShadingNetworks", + label="Write Shading Networks", + default=False), + BoolDef("writeTransformMatrix", + label="Write Transform Matrix", + default=True), + BoolDef("writeUsdAttributes", + label="Write USD Attributes", + default=True), + BoolDef("writeInstancesAsReferences", + label="Write Instances as References", + default=False), + BoolDef("timeVaryingTopology", + label="Time Varying Topology", + default=False), + TextDef("customMaterialNamespace", + label="Custom Material Namespace", + default=''), + NumberDef("numTimeSamples", + label="Num Time Samples", + default=1), + NumberDef("timeSamplesSpan", + label="Time Samples Span", + default=0.0), + ]) + + return defs diff --git a/client/ayon_maya/plugins/create/create_multiverse_usd_comp.py b/client/ayon_maya/plugins/create/create_multiverse_usd_comp.py new file mode 100644 index 00000000..2459704d --- /dev/null +++ b/client/ayon_maya/plugins/create/create_multiverse_usd_comp.py @@ -0,0 +1,48 @@ +from ayon_maya.api import plugin, lib +from ayon_core.lib import ( + BoolDef, + NumberDef, + EnumDef +) + + +class CreateMultiverseUsdComp(plugin.MayaCreator): + """Create Multiverse USD Composition""" + + identifier = "io.openpype.creators.maya.mvusdcomposition" + label = "Multiverse 
USD Composition" + product_type = "mvUsdComposition" + icon = "cubes" + + def get_instance_attr_defs(self): + + defs = lib.collect_animation_defs(fps=True) + defs.extend([ + EnumDef("fileFormat", + label="File format", + items=["usd", "usda"], + default="usd"), + BoolDef("stripNamespaces", + label="Strip Namespaces", + default=False), + BoolDef("mergeTransformAndShape", + label="Merge Transform and Shape", + default=False), + BoolDef("flattenContent", + label="Flatten Content", + default=False), + BoolDef("writeAsCompoundLayers", + label="Write As Compound Layers", + default=False), + BoolDef("writePendingOverrides", + label="Write Pending Overrides", + default=False), + NumberDef("numTimeSamples", + label="Num Time Samples", + default=1), + NumberDef("timeSamplesSpan", + label="Time Samples Span", + default=0.0), + ]) + + return defs diff --git a/client/ayon_maya/plugins/create/create_multiverse_usd_over.py b/client/ayon_maya/plugins/create/create_multiverse_usd_over.py new file mode 100644 index 00000000..b070daf5 --- /dev/null +++ b/client/ayon_maya/plugins/create/create_multiverse_usd_over.py @@ -0,0 +1,59 @@ +from ayon_maya.api import plugin, lib +from ayon_core.lib import ( + BoolDef, + NumberDef, + EnumDef +) + + +class CreateMultiverseUsdOver(plugin.MayaCreator): + """Create Multiverse USD Override""" + + identifier = "io.openpype.creators.maya.mvusdoverride" + label = "Multiverse USD Override" + product_type = "mvUsdOverride" + icon = "cubes" + + def get_instance_attr_defs(self): + defs = lib.collect_animation_defs(fps=True) + defs.extend([ + EnumDef("fileFormat", + label="File format", + items=["usd", "usda"], + default="usd"), + BoolDef("writeAll", + label="Write All", + default=False), + BoolDef("writeTransforms", + label="Write Transforms", + default=True), + BoolDef("writeVisibility", + label="Write Visibility", + default=True), + BoolDef("writeAttributes", + label="Write Attributes", + default=True), + BoolDef("writeMaterials", + label="Write Materials", + default=True), + BoolDef("writeVariants", + label="Write Variants", + default=True), + BoolDef("writeVariantsDefinition", + label="Write Variants Definition", + default=True), + BoolDef("writeActiveState", + label="Write Active State", + default=True), + BoolDef("writeNamespaces", + label="Write Namespaces", + default=False), + NumberDef("numTimeSamples", + label="Num Time Samples", + default=1), + NumberDef("timeSamplesSpan", + label="Time Samples Span", + default=0.0), + ]) + + return defs diff --git a/client/ayon_maya/plugins/create/create_proxy_abc.py b/client/ayon_maya/plugins/create/create_proxy_abc.py new file mode 100644 index 00000000..431f1139 --- /dev/null +++ b/client/ayon_maya/plugins/create/create_proxy_abc.py @@ -0,0 +1,50 @@ +from ayon_maya.api import ( + lib, + plugin +) +from ayon_core.lib import ( + BoolDef, + TextDef +) + + +class CreateProxyAlembic(plugin.MayaCreator): + """Proxy Alembic for animated data""" + + identifier = "io.openpype.creators.maya.proxyabc" + label = "Proxy Alembic" + product_type = "proxyAbc" + icon = "gears" + write_color_sets = False + write_face_sets = False + + def get_instance_attr_defs(self): + + defs = lib.collect_animation_defs() + + defs.extend([ + BoolDef("writeColorSets", + label="Write vertex colors", + tooltip="Write vertex colors with the geometry", + default=self.write_color_sets), + BoolDef("writeFaceSets", + label="Write face sets", + tooltip="Write face sets with the geometry", + default=self.write_face_sets), + BoolDef("worldSpace", + label="World-Space Export", 
+                    default=True),
+            TextDef("nameSuffix",
+                    label="Name Suffix for Bounding Box",
+                    default="_BBox",
+                    placeholder="_BBox"),
+            TextDef("attr",
+                    label="Custom Attributes",
+                    default="",
+                    placeholder="attr1, attr2"),
+            TextDef("attrPrefix",
+                    label="Custom Attributes Prefix",
+                    placeholder="prefix1, prefix2")
+        ])
+
+        return defs
diff --git a/client/ayon_maya/plugins/create/create_redshift_proxy.py b/client/ayon_maya/plugins/create/create_redshift_proxy.py
new file mode 100644
index 00000000..c4cc874a
--- /dev/null
+++ b/client/ayon_maya/plugins/create/create_redshift_proxy.py
@@ -0,0 +1,25 @@
+# -*- coding: utf-8 -*-
+"""Creator of Redshift proxy product types."""
+
+from ayon_maya.api import plugin, lib
+from ayon_core.lib import BoolDef
+
+
+class CreateRedshiftProxy(plugin.MayaCreator):
+    """Create instance of Redshift Proxy product."""
+
+    identifier = "io.openpype.creators.maya.redshiftproxy"
+    label = "Redshift Proxy"
+    product_type = "redshiftproxy"
+    icon = "gears"
+
+    def get_instance_attr_defs(self):
+
+        defs = [
+            BoolDef("animation",
+                    label="Export animation",
+                    default=False)
+        ]
+
+        defs.extend(lib.collect_animation_defs())
+        return defs
diff --git a/client/ayon_maya/plugins/create/create_render.py b/client/ayon_maya/plugins/create/create_render.py
new file mode 100644
index 00000000..5defee7d
--- /dev/null
+++ b/client/ayon_maya/plugins/create/create_render.py
@@ -0,0 +1,115 @@
+# -*- coding: utf-8 -*-
+"""Create ``Render`` instance in Maya."""
+
+from ayon_maya.api import (
+    lib_rendersettings,
+    plugin
+)
+from ayon_core.pipeline import CreatorError
+from ayon_core.lib import (
+    BoolDef,
+    NumberDef,
+)
+
+
+class CreateRenderlayer(plugin.RenderlayerCreator):
+    """Create and manage a renderlayer product per renderLayer in workfile.
+
+    This generates a single node in the scene which, if it exists, tells
+    the Creator to collect Maya Render Setup render layers as individual
+    instances. As such, triggering create doesn't actually create an
+    instance node per layer but only the node which tells the Creator it
+    may now collect the render layers.
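+
+    Example (hypothetical sketch):
+        creator.create("renderMain", instance_data, pre_create_data)
+        # Creates the "renderingMain" singleton node; on publisher refresh
+        # each Render Setup layer is collected as its own render instance.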
+ + """ + + identifier = "io.openpype.creators.maya.renderlayer" + product_type = "renderlayer" + label = "Render" + icon = "eye" + + layer_instance_prefix = "render" + singleton_node_name = "renderingMain" + + render_settings = {} + + @classmethod + def apply_settings(cls, project_settings): + cls.render_settings = project_settings["maya"]["render_settings"] + + def create(self, product_name, instance_data, pre_create_data): + # Only allow a single render instance to exist + if self._get_singleton_node(): + raise CreatorError( + "A Render instance already exists - only one can be " + "configured.\n\n" + "To render multiple render layers, create extra Render Setup " + "Layers via Maya's Render Setup UI.\n" + "Then refresh the publisher to detect the new layers for " + "rendering.\n\n" + "With a render instance present all Render Setup layers in " + "your workfile are renderable instances.") + + # Apply default project render settings on create + if self.render_settings.get("apply_render_settings"): + lib_rendersettings.RenderSettings().set_default_renderer_settings() + + super(CreateRenderlayer, self).create(product_name, + instance_data, + pre_create_data) + + def get_instance_attr_defs(self): + """Create instance settings.""" + + return [ + BoolDef("review", + label="Review", + tooltip="Mark as reviewable", + default=True), + BoolDef("extendFrames", + label="Extend Frames", + tooltip="Extends the frames on top of the previous " + "publish.\nIf the previous was 1001-1050 and you " + "would now submit 1020-1070 only the new frames " + "1051-1070 would be rendered and published " + "together with the previously rendered frames.\n" + "If 'overrideExistingFrame' is enabled it *will* " + "render any existing frames.", + default=False), + BoolDef("overrideExistingFrame", + label="Override Existing Frame", + tooltip="Override existing rendered frames " + "(if they exist).", + default=True), + + # TODO: Should these move to submit_maya_deadline plugin? 
+ # Tile rendering + BoolDef("tileRendering", + label="Enable tiled rendering", + default=False), + NumberDef("tilesX", + label="Tiles X", + default=2, + minimum=1, + decimals=0), + NumberDef("tilesY", + label="Tiles Y", + default=2, + minimum=1, + decimals=0), + + # Additional settings + BoolDef("convertToScanline", + label="Convert to Scanline", + tooltip="Convert the output images to scanline images", + default=False), + BoolDef("useReferencedAovs", + label="Use Referenced AOVs", + tooltip="Consider the AOVs from referenced scenes as well", + default=False), + + BoolDef("renderSetupIncludeLights", + label="Render Setup Include Lights", + default=self.render_settings.get("enable_all_lights", + False)) + ] diff --git a/client/ayon_maya/plugins/create/create_rendersetup.py b/client/ayon_maya/plugins/create/create_rendersetup.py new file mode 100644 index 00000000..415ab4ff --- /dev/null +++ b/client/ayon_maya/plugins/create/create_rendersetup.py @@ -0,0 +1,31 @@ +from ayon_maya.api import plugin +from ayon_core.pipeline import CreatorError + + +class CreateRenderSetup(plugin.MayaCreator): + """Create rendersetup template json data""" + + identifier = "io.openpype.creators.maya.rendersetup" + label = "Render Setup Preset" + product_type = "rendersetup" + icon = "tablet" + + def get_pre_create_attr_defs(self): + # Do not show the "use_selection" setting from parent class + return [] + + def create(self, product_name, instance_data, pre_create_data): + + existing_instance = None + for instance in self.create_context.instances: + if instance.product_type == self.product_type: + existing_instance = instance + break + + if existing_instance: + raise CreatorError("A RenderSetup instance already exists - only " + "one can be configured.") + + super(CreateRenderSetup, self).create(product_name, + instance_data, + pre_create_data) diff --git a/client/ayon_maya/plugins/create/create_review.py b/client/ayon_maya/plugins/create/create_review.py new file mode 100644 index 00000000..26fad91e --- /dev/null +++ b/client/ayon_maya/plugins/create/create_review.py @@ -0,0 +1,148 @@ +import json + +from maya import cmds +import ayon_api + +from ayon_maya.api import ( + lib, + plugin +) +from ayon_core.lib import ( + BoolDef, + NumberDef, + EnumDef +) +from ayon_core.pipeline import CreatedInstance + +TRANSPARENCIES = [ + "preset", + "simple", + "object sorting", + "weighted average", + "depth peeling", + "alpha cut" +] + + +class CreateReview(plugin.MayaCreator): + """Playblast reviewable""" + + identifier = "io.openpype.creators.maya.review" + label = "Review" + product_type = "review" + icon = "video-camera" + + useMayaTimeline = True + panZoom = False + + # Overriding "create" method to prefill values from settings. 
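+    # For instance (hypothetical settings values): a capture preset of
+    # {"Resolution": {"width": 1920, "height": 1080}} prefills the
+    # "review_width" / "review_height" attribute defaults with 1920 / 1080.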
+ def create(self, product_name, instance_data, pre_create_data): + + members = list() + if pre_create_data.get("use_selection"): + members = cmds.ls(selection=True) + + project_name = self.project_name + folder_path = instance_data["folderPath"] + task_name = instance_data["task"] + folder_entity = ayon_api.get_folder_by_path( + project_name, folder_path, fields={"id"} + ) + task_entity = ayon_api.get_task_by_name( + project_name, folder_entity["id"], task_name, fields={"taskType"} + ) + preset = lib.get_capture_preset( + task_name, + task_entity["taskType"], + product_name, + self.project_settings, + self.log + ) + self.log.debug( + "Using preset: {}".format( + json.dumps(preset, indent=4, sort_keys=True) + ) + ) + + with lib.undo_chunk(): + instance_node = cmds.sets(members, name=product_name) + instance_data["instance_node"] = instance_node + instance = CreatedInstance( + self.product_type, + product_name, + instance_data, + self) + + creator_attribute_defs_by_key = { + x.key: x for x in instance.creator_attribute_defs + } + mapping = { + "review_width": preset["Resolution"]["width"], + "review_height": preset["Resolution"]["height"], + "isolate": preset["Generic"]["isolate_view"], + "imagePlane": preset["ViewportOptions"]["imagePlane"], + "panZoom": preset["Generic"]["pan_zoom"] + } + for key, value in mapping.items(): + creator_attribute_defs_by_key[key].default = value + + self._add_instance_to_context(instance) + + self.imprint_instance_node(instance_node, + data=instance.data_to_store()) + return instance + + def get_instance_attr_defs(self): + + defs = lib.collect_animation_defs() + + # Option for using Maya or folder frame range in settings. + if not self.useMayaTimeline: + # Update the defaults to be the folder frame range + frame_range = lib.get_frame_range() + defs_by_key = {attr_def.key: attr_def for attr_def in defs} + for key, value in frame_range.items(): + if key not in defs_by_key: + raise RuntimeError("Attribute definition not found to be " + "updated for key: {}".format(key)) + attr_def = defs_by_key[key] + attr_def.default = value + + defs.extend([ + NumberDef("review_width", + label="Review width", + tooltip="A value of zero will use the folder resolution.", + decimals=0, + minimum=0, + default=0), + NumberDef("review_height", + label="Review height", + tooltip="A value of zero will use the folder resolution.", + decimals=0, + minimum=0, + default=0), + BoolDef("keepImages", + label="Keep Images", + tooltip="Whether to also publish along the image sequence " + "next to the video reviewable.", + default=False), + BoolDef("isolate", + label="Isolate render members of instance", + tooltip="When enabled only the members of the instance " + "will be included in the playblast review.", + default=False), + BoolDef("imagePlane", + label="Show Image Plane", + default=True), + EnumDef("transparency", + label="Transparency", + items=TRANSPARENCIES), + BoolDef("panZoom", + label="Enable camera pan/zoom", + default=True), + EnumDef("displayLights", + label="Display Lights", + items=lib.DISPLAY_LIGHTS_ENUM), + ]) + + return defs diff --git a/client/ayon_maya/plugins/create/create_rig.py b/client/ayon_maya/plugins/create/create_rig.py new file mode 100644 index 00000000..135e51bc --- /dev/null +++ b/client/ayon_maya/plugins/create/create_rig.py @@ -0,0 +1,32 @@ +from maya import cmds + +from ayon_maya.api import plugin + + +class CreateRig(plugin.MayaCreator): + """Artist-friendly rig with controls to direct motion""" + + identifier = "io.openpype.creators.maya.rig" + label = 
"Rig" + product_type = "rig" + icon = "wheelchair" + + def create(self, product_name, instance_data, pre_create_data): + + instance = super(CreateRig, self).create(product_name, + instance_data, + pre_create_data) + + instance_node = instance.get("instance_node") + + self.log.info("Creating Rig instance set up ...") + # TODO:change name (_controls_SET -> _rigs_SET) + controls = cmds.sets(name=product_name + "_controls_SET", empty=True) + # TODO:change name (_out_SET -> _geo_SET) + pointcache = cmds.sets(name=product_name + "_out_SET", empty=True) + skeleton = cmds.sets( + name=product_name + "_skeletonAnim_SET", empty=True) + skeleton_mesh = cmds.sets( + name=product_name + "_skeletonMesh_SET", empty=True) + cmds.sets([controls, pointcache, + skeleton, skeleton_mesh], forceElement=instance_node) diff --git a/client/ayon_maya/plugins/create/create_setdress.py b/client/ayon_maya/plugins/create/create_setdress.py new file mode 100644 index 00000000..12532e07 --- /dev/null +++ b/client/ayon_maya/plugins/create/create_setdress.py @@ -0,0 +1,19 @@ +from ayon_maya.api import plugin +from ayon_core.lib import BoolDef + + +class CreateSetDress(plugin.MayaCreator): + """A grouped package of loaded content""" + + identifier = "io.openpype.creators.maya.setdress" + label = "Set Dress" + product_type = "setdress" + icon = "cubes" + default_variants = ["Main", "Anim"] + + def get_instance_attr_defs(self): + return [ + BoolDef("exactSetMembersOnly", + label="Exact Set Members Only", + default=True) + ] diff --git a/client/ayon_maya/plugins/create/create_unreal_skeletalmesh.py b/client/ayon_maya/plugins/create/create_unreal_skeletalmesh.py new file mode 100644 index 00000000..a182fe7a --- /dev/null +++ b/client/ayon_maya/plugins/create/create_unreal_skeletalmesh.py @@ -0,0 +1,105 @@ +# -*- coding: utf-8 -*- +"""Creator for Unreal Skeletal Meshes.""" +from ayon_maya.api import plugin, lib +from ayon_core.lib import ( + BoolDef, + TextDef +) + +from maya import cmds # noqa + + +class CreateUnrealSkeletalMesh(plugin.MayaCreator): + """Unreal Static Meshes with collisions.""" + + identifier = "io.openpype.creators.maya.unrealskeletalmesh" + label = "Unreal - Skeletal Mesh" + product_type = "skeletalMesh" + icon = "thumbs-up" + + # Defined in settings + joint_hints = set() + + def get_dynamic_data( + self, + project_name, + folder_entity, + task_entity, + variant, + host_name, + instance + ): + """ + The default product name templates for Unreal include {asset} and thus + we should pass that along as dynamic data. 
+ """ + dynamic_data = super(CreateUnrealSkeletalMesh, self).get_dynamic_data( + project_name, + folder_entity, + task_entity, + variant, + host_name, + instance + ) + dynamic_data["asset"] = folder_entity["name"] + return dynamic_data + + def create(self, product_name, instance_data, pre_create_data): + + with lib.undo_chunk(): + instance = super(CreateUnrealSkeletalMesh, self).create( + product_name, instance_data, pre_create_data) + instance_node = instance.get("instance_node") + + # We reorganize the geometry that was originally added into the + # set into either 'joints_SET' or 'geometry_SET' based on the + # joint_hints from project settings + members = cmds.sets(instance_node, query=True) or [] + cmds.sets(clear=instance_node) + + geometry_set = cmds.sets(name="geometry_SET", empty=True) + joints_set = cmds.sets(name="joints_SET", empty=True) + + cmds.sets([geometry_set, joints_set], forceElement=instance_node) + + for node in members: + if node in self.joint_hints: + cmds.sets(node, forceElement=joints_set) + else: + cmds.sets(node, forceElement=geometry_set) + + def get_instance_attr_defs(self): + + defs = lib.collect_animation_defs() + + defs.extend([ + BoolDef("renderableOnly", + label="Renderable Only", + tooltip="Only export renderable visible shapes", + default=False), + BoolDef("visibleOnly", + label="Visible Only", + tooltip="Only export dag objects visible during " + "frame range", + default=False), + BoolDef("includeParentHierarchy", + label="Include Parent Hierarchy", + tooltip="Whether to include parent hierarchy of nodes in " + "the publish instance", + default=False), + BoolDef("worldSpace", + label="World-Space Export", + default=True), + BoolDef("refresh", + label="Refresh viewport during export", + default=False), + TextDef("attr", + label="Custom Attributes", + default="", + placeholder="attr1, attr2"), + TextDef("attrPrefix", + label="Custom Attributes Prefix", + placeholder="prefix1, prefix2") + ]) + + return defs diff --git a/client/ayon_maya/plugins/create/create_unreal_staticmesh.py b/client/ayon_maya/plugins/create/create_unreal_staticmesh.py new file mode 100644 index 00000000..e5436bca --- /dev/null +++ b/client/ayon_maya/plugins/create/create_unreal_staticmesh.py @@ -0,0 +1,95 @@ +# -*- coding: utf-8 -*- +"""Creator for Unreal Static Meshes.""" +from ayon_maya.api import plugin, lib +from maya import cmds # noqa + + +class CreateUnrealStaticMesh(plugin.MayaCreator): + """Unreal Static Meshes with collisions.""" + + identifier = "io.openpype.creators.maya.unrealstaticmesh" + label = "Unreal - Static Mesh" + product_type = "staticMesh" + icon = "cube" + + # Defined in settings + collision_prefixes = [] + + def get_dynamic_data( + self, + project_name, + folder_entity, + task_entity, + variant, + host_name, + instance + ): + """ + The default product name templates for Unreal include {asset} and thus + we should pass that along as dynamic data. 
+ """ + dynamic_data = super(CreateUnrealStaticMesh, self).get_dynamic_data( + project_name, + folder_entity, + task_entity, + variant, + host_name, + instance + ) + dynamic_data["asset"] = folder_entity["name"] + return dynamic_data + + def create(self, product_name, instance_data, pre_create_data): + + with lib.undo_chunk(): + instance = super(CreateUnrealStaticMesh, self).create( + product_name, instance_data, pre_create_data) + instance_node = instance.get("instance_node") + + # We reorganize the geometry that was originally added into the + # set into either 'collision_SET' or 'geometry_SET' based on the + # collision_prefixes from project settings + members = cmds.sets(instance_node, query=True) + cmds.sets(clear=instance_node) + + geometry_set = cmds.sets(name="geometry_SET", empty=True) + collisions_set = cmds.sets(name="collisions_SET", empty=True) + + cmds.sets([geometry_set, collisions_set], + forceElement=instance_node) + + members = cmds.ls(members, long=True) or [] + children = cmds.listRelatives(members, allDescendents=True, + fullPath=True) or [] + transforms = cmds.ls(members + children, type="transform") + for transform in transforms: + + if not cmds.listRelatives(transform, + type="shape", + noIntermediate=True): + # Exclude all transforms that have no direct shapes + continue + + if self.has_collision_prefix(transform): + cmds.sets(transform, forceElement=collisions_set) + else: + cmds.sets(transform, forceElement=geometry_set) + + def has_collision_prefix(self, node_path): + """Return whether node name of path matches collision prefix. + + If the node name matches the collision prefix we add it to the + `collisions_SET` instead of the `geometry_SET`. + + Args: + node_path (str): Maya node path. + + Returns: + bool: Whether the node should be considered a collision mesh. 
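+
+        Example (assuming hypothetical collision_prefixes ["UBX_", "UCP_"]
+        from settings):
+            "|root|UBX_wall" -> True
+            "|root|wall_GEO" -> False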
+
+        """
+        node_name = node_path.rsplit("|", 1)[-1]
+        for prefix in self.collision_prefixes:
+            if node_name.startswith(prefix):
+                return True
+        return False
diff --git a/client/ayon_maya/plugins/create/create_unreal_yeticache.py b/client/ayon_maya/plugins/create/create_unreal_yeticache.py
new file mode 100644
index 00000000..eea866d4
--- /dev/null
+++ b/client/ayon_maya/plugins/create/create_unreal_yeticache.py
@@ -0,0 +1,39 @@
+from ayon_maya.api import (
+    lib,
+    plugin
+)
+from ayon_core.lib import NumberDef
+
+
+class CreateUnrealYetiCache(plugin.MayaCreator):
+    """Output for Yeti procedural plugin nodes"""
+
+    identifier = "io.openpype.creators.maya.unrealyeticache"
+    label = "Unreal - Yeti Cache"
+    product_type = "yeticacheUE"
+    icon = "pagelines"
+
+    def get_instance_attr_defs(self):
+
+        defs = [
+            NumberDef("preroll",
+                      label="Preroll",
+                      minimum=0,
+                      default=0,
+                      decimals=0)
+        ]
+
+        # Add animation data without step and handles
+        defs.extend(lib.collect_animation_defs())
+        remove = {"step", "handleStart", "handleEnd"}
+        defs = [attr_def for attr_def in defs if attr_def.key not in remove]
+
+        # Add samples after frame range
+        defs.append(
+            NumberDef("samples",
+                      label="Samples",
+                      default=3,
+                      decimals=0)
+        )
+
+        return defs
diff --git a/client/ayon_maya/plugins/create/create_vrayproxy.py b/client/ayon_maya/plugins/create/create_vrayproxy.py
new file mode 100644
index 00000000..742e14ac
--- /dev/null
+++ b/client/ayon_maya/plugins/create/create_vrayproxy.py
@@ -0,0 +1,50 @@
+from ayon_maya.api import (
+    plugin,
+    lib
+)
+from ayon_core.lib import BoolDef
+
+
+class CreateVrayProxy(plugin.MayaCreator):
+    """VRay Proxy for animated data"""
+
+    identifier = "io.openpype.creators.maya.vrayproxy"
+    label = "VRay Proxy"
+    product_type = "vrayproxy"
+    icon = "gears"
+
+    vrmesh = True
+    alembic = True
+
+    def get_instance_attr_defs(self):
+
+        defs = [
+            BoolDef("animation",
+                    label="Export Animation",
+                    default=False)
+        ]
+
+        # Add time range attributes but remove some attributes
+        # which this instance actually doesn't use
+        defs.extend(lib.collect_animation_defs())
+        remove = {"handleStart", "handleEnd", "step"}
+        defs = [attr_def for attr_def in defs if attr_def.key not in remove]
+
+        defs.extend([
+            BoolDef("vertexColors",
+                    label="Write vertex colors",
+                    tooltip="Write vertex colors with the geometry",
+                    default=False),
+            BoolDef("vrmesh",
+                    label="Export VRayMesh",
+                    tooltip="Publish a .vrmesh (VRayMesh) file for "
+                            "this VRayProxy",
+                    default=self.vrmesh),
+            BoolDef("alembic",
+                    label="Export Alembic",
+                    tooltip="Publish a .abc (Alembic) file for "
+                            "this VRayProxy",
+                    default=self.alembic),
+        ])
+
+        return defs
diff --git a/client/ayon_maya/plugins/create/create_vrayscene.py b/client/ayon_maya/plugins/create/create_vrayscene.py
new file mode 100644
index 00000000..11c356fd
--- /dev/null
+++ b/client/ayon_maya/plugins/create/create_vrayscene.py
@@ -0,0 +1,52 @@
+# -*- coding: utf-8 -*-
+"""Create instance of vrayscene."""
+
+from ayon_maya.api import (
+    lib_rendersettings,
+    plugin
+)
+from ayon_core.pipeline import CreatorError
+from ayon_core.lib import BoolDef
+
+
+class CreateVRayScene(plugin.RenderlayerCreator):
+    """Create VRay Scene."""
+
+    identifier = "io.openpype.creators.maya.vrayscene"
+
+    product_type = "vrayscene"
+    label = "VRay Scene"
+    icon = "cubes"
+
+    render_settings = {}
+    singleton_node_name = "vraysceneMain"
+
+    @classmethod
+    def apply_settings(cls, project_settings):
+        cls.render_settings = project_settings["maya"]["render_settings"]
+
+    def
create(self, product_name, instance_data, pre_create_data): + # Only allow a single render instance to exist + if self._get_singleton_node(): + raise CreatorError("A Render instance already exists - only " + "one can be configured.") + + super(CreateVRayScene, self).create(product_name, + instance_data, + pre_create_data) + + # Apply default project render settings on create + if self.render_settings.get("apply_render_settings"): + lib_rendersettings.RenderSettings().set_default_renderer_settings() + + def get_instance_attr_defs(self): + """Create instance settings.""" + + return [ + BoolDef("vraySceneMultipleFiles", + label="V-Ray Scene Multiple Files", + default=False), + BoolDef("exportOnFarm", + label="Export on farm", + default=False) + ] diff --git a/client/ayon_maya/plugins/create/create_workfile.py b/client/ayon_maya/plugins/create/create_workfile.py new file mode 100644 index 00000000..e0c94611 --- /dev/null +++ b/client/ayon_maya/plugins/create/create_workfile.py @@ -0,0 +1,118 @@ +# -*- coding: utf-8 -*- +"""Creator plugin for creating workfiles.""" +import ayon_api + +from ayon_core.pipeline import CreatedInstance, AutoCreator +from ayon_maya.api import plugin +from maya import cmds + + +class CreateWorkfile(plugin.MayaCreatorBase, AutoCreator): + """Workfile auto-creator.""" + identifier = "io.openpype.creators.maya.workfile" + label = "Workfile" + product_type = "workfile" + icon = "fa5.file" + + default_variant = "Main" + + def create(self): + + variant = self.default_variant + current_instance = next( + ( + instance for instance in self.create_context.instances + if instance.creator_identifier == self.identifier + ), None) + + project_name = self.project_name + folder_path = self.create_context.get_current_folder_path() + task_name = self.create_context.get_current_task_name() + host_name = self.create_context.host_name + + current_folder_path = None + if current_instance is not None: + current_folder_path = current_instance["folderPath"] + + if current_instance is None: + folder_entity = ayon_api.get_folder_by_path( + project_name, folder_path + ) + task_entity = ayon_api.get_task_by_name( + project_name, folder_entity["id"], task_name + ) + product_name = self.get_product_name( + project_name, + folder_entity, + task_entity, + variant, + host_name, + ) + data = { + "folderPath": folder_path, + "task": task_name, + "variant": variant + } + data.update( + self.get_dynamic_data( + project_name, + folder_entity, + task_entity, + variant, + host_name, + current_instance) + ) + self.log.info("Auto-creating workfile instance...") + current_instance = CreatedInstance( + self.product_type, product_name, data, self + ) + self._add_instance_to_context(current_instance) + elif ( + current_folder_path != folder_path + or current_instance["task"] != task_name + ): + # Update instance context if is not the same + folder_entity = ayon_api.get_folder_by_path( + project_name, folder_path + ) + task_entity = ayon_api.get_task_by_name( + project_name, folder_entity["id"], task_name + ) + product_name = self.get_product_name( + project_name, + folder_entity, + task_entity, + variant, + host_name, + ) + + current_instance["folderPath"] = folder_entity["path"] + current_instance["task"] = task_name + current_instance["productName"] = product_name + + def collect_instances(self): + self.cache_instance_data(self.collection_shared_data) + cached_instances = ( + self.collection_shared_data["maya_cached_instance_data"] + ) + for node in cached_instances.get(self.identifier, []): + node_data = 
self.read_instance_node(node) + + created_instance = CreatedInstance.from_existing(node_data, self) + self._add_instance_to_context(created_instance) + + def update_instances(self, update_list): + for created_inst, _changes in update_list: + data = created_inst.data_to_store() + node = data.get("instance_node") + if not node: + node = self.create_node() + created_inst["instance_node"] = node + data = created_inst.data_to_store() + + self.imprint_instance_node(node, data) + + def create_node(self): + node = cmds.sets(empty=True, name="workfileMain") + cmds.setAttr(node + ".hiddenInOutliner", True) + return node diff --git a/client/ayon_maya/plugins/create/create_xgen.py b/client/ayon_maya/plugins/create/create_xgen.py new file mode 100644 index 00000000..d13d032a --- /dev/null +++ b/client/ayon_maya/plugins/create/create_xgen.py @@ -0,0 +1,10 @@ +from ayon_maya.api import plugin + + +class CreateXgen(plugin.MayaCreator): + """Xgen""" + + identifier = "io.openpype.creators.maya.xgen" + label = "Xgen" + product_type = "xgen" + icon = "pagelines" diff --git a/client/ayon_maya/plugins/create/create_yeti_cache.py b/client/ayon_maya/plugins/create/create_yeti_cache.py new file mode 100644 index 00000000..8a834f18 --- /dev/null +++ b/client/ayon_maya/plugins/create/create_yeti_cache.py @@ -0,0 +1,39 @@ +from ayon_maya.api import ( + lib, + plugin +) +from ayon_core.lib import NumberDef + + +class CreateYetiCache(plugin.MayaCreator): + """Output for procedural plugin nodes of Yeti """ + + identifier = "io.openpype.creators.maya.yeticache" + label = "Yeti Cache" + product_type = "yeticache" + icon = "pagelines" + + def get_instance_attr_defs(self): + + defs = [ + NumberDef("preroll", + label="Preroll", + minimum=0, + default=0, + decimals=0) + ] + + # Add animation data without step and handles + defs.extend(lib.collect_animation_defs()) + remove = {"step", "handleStart", "handleEnd"} + defs = [attr_def for attr_def in defs if attr_def.key not in remove] + + # Add samples after frame range + defs.append( + NumberDef("samples", + label="Samples", + default=3, + decimals=0) + ) + + return defs diff --git a/client/ayon_maya/plugins/create/create_yeti_rig.py b/client/ayon_maya/plugins/create/create_yeti_rig.py new file mode 100644 index 00000000..c5378dc1 --- /dev/null +++ b/client/ayon_maya/plugins/create/create_yeti_rig.py @@ -0,0 +1,27 @@ +from maya import cmds + +from ayon_maya.api import ( + lib, + plugin +) + + +class CreateYetiRig(plugin.MayaCreator): + """Output for procedural plugin nodes ( Yeti / XGen / etc)""" + + identifier = "io.openpype.creators.maya.yetirig" + label = "Yeti Rig" + product_type = "yetiRig" + icon = "usb" + + def create(self, product_name, instance_data, pre_create_data): + + with lib.undo_chunk(): + instance = super(CreateYetiRig, self).create(product_name, + instance_data, + pre_create_data) + instance_node = instance.get("instance_node") + + self.log.info("Creating Rig instance set up ...") + input_meshes = cmds.sets(name="input_SET", empty=True) + cmds.sets(input_meshes, forceElement=instance_node) diff --git a/client/ayon_maya/plugins/inventory/connect_geometry.py b/client/ayon_maya/plugins/inventory/connect_geometry.py new file mode 100644 index 00000000..ccb88313 --- /dev/null +++ b/client/ayon_maya/plugins/inventory/connect_geometry.py @@ -0,0 +1,158 @@ +from maya import cmds + +from ayon_core.pipeline import InventoryAction, get_repres_contexts +from ayon_maya.api.lib import get_id + + +class ConnectGeometry(InventoryAction): + """Connect geometries within 
containers.
+
+    The source container will connect to the target containers by searching
+    for matching geometry IDs (cbId).
+    Source containers are of product type "animation" or "pointcache".
+    The connection will be done with a live world-space blendshape.
+    """
+
+    label = "Connect Geometry"
+    icon = "link"
+    color = "white"
+
+    def process(self, containers):
+        # Validate selection is more than 1.
+        message = (
+            "Only 1 container selected. 2+ containers needed for this action."
+        )
+        if len(containers) == 1:
+            self.display_warning(message)
+            return
+
+        # Categorize containers by product type.
+        containers_by_product_type = {}
+        repre_ids = {
+            container["representation"]
+            for container in containers
+        }
+        repre_contexts_by_id = get_repres_contexts(repre_ids)
+        for container in containers:
+            repre_id = container["representation"]
+            repre_context = repre_contexts_by_id[repre_id]
+
+            product_type = repre_context["product"]["productType"]
+
+            containers_by_product_type.setdefault(product_type, [])
+            containers_by_product_type[product_type].append(container)
+
+        # Validate to only 1 source container.
+        source_containers = containers_by_product_type.get("animation", [])
+        source_containers += containers_by_product_type.get("pointcache", [])
+        source_container_namespaces = [
+            x["namespace"] for x in source_containers
+        ]
+        message = (
+            "{} animation containers selected:\n\n{}\n\nOnly select 1 of type "
+            "\"animation\" or \"pointcache\".".format(
+                len(source_containers), source_container_namespaces
+            )
+        )
+        if len(source_containers) != 1:
+            self.display_warning(message)
+            return
+
+        source_object = source_containers[0]["objectName"]
+
+        # Collect matching geometry transforms based on the cbId attribute.
+        target_containers = []
+        for product_type, containers in containers_by_product_type.items():
+            if product_type in ["animation", "pointcache"]:
+                continue
+
+            target_containers.extend(containers)
+
+        source_data = self.get_container_data(source_object)
+        matches = []
+        node_types = set()
+        for target_container in target_containers:
+            target_data = self.get_container_data(
+                target_container["objectName"]
+            )
+            node_types.update(target_data["node_types"])
+            for id, transform in target_data["ids"].items():
+                source_match = source_data["ids"].get(id)
+                if source_match:
+                    matches.append([source_match, transform])
+
+        # Message the user about what is about to happen.
+        if not matches:
+            self.display_warning("No matching geometries found.")
+            return
+
+        message = "Connecting geometries:\n\n"
+        for match in matches:
+            message += "{} > {}\n".format(match[0], match[1])
+
+        choice = self.display_warning(message, show_cancel=True)
+        if choice is False:
+            return
+
+        # Setup live worldspace blendshape connection.
+        for source, target in matches:
+            blendshape = cmds.blendShape(source, target)[0]
+            cmds.setAttr(blendshape + ".origin", 0)
+            cmds.setAttr(blendshape + "." + target.split(":")[-1], 1)
+
+        # Update Xgen if in any of the containers.
+        if "xgmPalette" in node_types:
+            cmds.xgmPreview()
+
+    def get_container_data(self, container):
+        """Collects data about the container nodes.
+
+        Args:
+            container (dict): Container instance.
+
+        Returns:
+            data (dict):
+                "node_types": All node types in container nodes.
+                "ids": If the node is a mesh, we collect its parent transform
+                    id.
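+
+        Example (hypothetical values):
+            {
+                "node_types": {"transform", "mesh"},
+                "ids": {"<cbId>": "|char_GRP|body_GEO"}
+            }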
+ """ + data = {"node_types": set(), "ids": {}} + ref_node = cmds.sets(container, query=True, nodesOnly=True)[0] + for node in cmds.referenceQuery(ref_node, nodes=True): + node_type = cmds.nodeType(node) + data["node_types"].add(node_type) + + # Only interested in mesh transforms for connecting geometry with + # blendshape. + if node_type != "mesh": + continue + + transform = cmds.listRelatives(node, parent=True)[0] + data["ids"][get_id(transform)] = transform + + return data + + def display_warning(self, message, show_cancel=False): + """Show feedback to user. + + Returns: + bool + """ + + from qtpy import QtWidgets + + accept = QtWidgets.QMessageBox.Ok + if show_cancel: + buttons = accept | QtWidgets.QMessageBox.Cancel + else: + buttons = accept + + state = QtWidgets.QMessageBox.warning( + None, + "", + message, + buttons=buttons, + defaultButton=accept + ) + + return state == accept diff --git a/client/ayon_maya/plugins/inventory/connect_xgen.py b/client/ayon_maya/plugins/inventory/connect_xgen.py new file mode 100644 index 00000000..166c4190 --- /dev/null +++ b/client/ayon_maya/plugins/inventory/connect_xgen.py @@ -0,0 +1,174 @@ +from maya import cmds +import xgenm + +from ayon_core.pipeline import ( + InventoryAction, + get_repres_contexts, + get_representation_path, +) + + +class ConnectXgen(InventoryAction): + """Connect Xgen with an animation or pointcache. + """ + + label = "Connect Xgen" + icon = "link" + color = "white" + + def process(self, containers): + # Validate selection is more than 1. + message = ( + "Only 1 container selected. 2+ containers needed for this action." + ) + if len(containers) == 1: + self.display_warning(message) + return + + # Categorize containers by product type. + containers_by_product_type = {} + repre_ids = { + container["representation"] + for container in containers + } + repre_contexts_by_id = get_repres_contexts(repre_ids) + for container in containers: + repre_id = container["representation"] + repre_context = repre_contexts_by_id[repre_id] + + product_type = repre_context["product"]["productType"] + + containers_by_product_type.setdefault(product_type, []) + containers_by_product_type[product_type].append(container) + + # Validate to only 1 source container. + source_containers = containers_by_product_type.get("animation", []) + source_containers += containers_by_product_type.get("pointcache", []) + source_container_namespaces = [ + x["namespace"] for x in source_containers + ] + message = ( + "{} animation containers selected:\n\n{}\n\nOnly select 1 of type " + "\"animation\" or \"pointcache\".".format( + len(source_containers), source_container_namespaces + ) + ) + if len(source_containers) != 1: + self.display_warning(message) + return + + source_container = source_containers[0] + source_repre_id = source_container["representation"] + source_object = source_container["objectName"] + + # Validate source representation is an alembic. + source_path = get_representation_path( + repre_contexts_by_id[source_repre_id]["representation"] + ).replace("\\", "/") + message = "Animation container \"{}\" is not an alembic:\n{}".format( + source_container["namespace"], source_path + ) + if not source_path.endswith(".abc"): + self.display_warning(message) + return + + # Target containers. 
+ target_containers = [] + for product_type, containers in containers_by_product_type.items(): + if product_type in ["animation", "pointcache"]: + continue + + target_containers.extend(containers) + + # Inform user of connections from source representation to target + # descriptions. + descriptions_data = [] + connections_msg = "" + for target_container in target_containers: + reference_node = cmds.sets( + target_container["objectName"], query=True + )[0] + palettes = cmds.ls( + cmds.referenceQuery(reference_node, nodes=True), + type="xgmPalette" + ) + for palette in palettes: + for description in xgenm.descriptions(palette): + descriptions_data.append([palette, description]) + connections_msg += "\n{}/{}".format(palette, description) + + message = "Connecting \"{}\" to:\n".format( + source_container["namespace"] + ) + message += connections_msg + choice = self.display_warning(message, show_cancel=True) + if choice is False: + return + + # Recreate "xgenContainers" attribute to reset. + compound_name = "xgenContainers" + attr = "{}.{}".format(source_object, compound_name) + if cmds.objExists(attr): + cmds.deleteAttr(attr) + + cmds.addAttr( + source_object, + longName=compound_name, + attributeType="compound", + numberOfChildren=1, + multi=True + ) + + # Connect target containers. + for target_container in target_containers: + cmds.addAttr( + source_object, + longName="container", + attributeType="message", + parent=compound_name + ) + index = target_containers.index(target_container) + cmds.connectAttr( + target_container["objectName"] + ".message", + source_object + ".{}[{}].container".format( + compound_name, index + ) + ) + + # Setup cache on Xgen + object = "SplinePrimitive" + for palette, description in descriptions_data: + xgenm.setAttr("useCache", "true", palette, description, object) + xgenm.setAttr("liveMode", "false", palette, description, object) + xgenm.setAttr( + "cacheFileName", source_path, palette, description, object + ) + + # Refresh UI and viewport. + de = xgenm.xgGlobal.DescriptionEditor + de.refresh("Full") + + def display_warning(self, message, show_cancel=False): + """Show feedback to user. + + Returns: + bool + """ + + from qtpy import QtWidgets + + accept = QtWidgets.QMessageBox.Ok + if show_cancel: + buttons = accept | QtWidgets.QMessageBox.Cancel + else: + buttons = accept + + state = QtWidgets.QMessageBox.warning( + None, + "", + message, + buttons=buttons, + defaultButton=accept + ) + + return state == accept diff --git a/client/ayon_maya/plugins/inventory/connect_yeti_rig.py b/client/ayon_maya/plugins/inventory/connect_yeti_rig.py new file mode 100644 index 00000000..23854444 --- /dev/null +++ b/client/ayon_maya/plugins/inventory/connect_yeti_rig.py @@ -0,0 +1,187 @@ +import os +import json +from collections import defaultdict + +from maya import cmds + +from ayon_core.pipeline import ( + InventoryAction, + get_repres_contexts, + get_representation_path, +) +from ayon_maya.api.lib import get_container_members, get_id + + +class ConnectYetiRig(InventoryAction): + """Connect Yeti Rig with an animation or pointcache.""" + + label = "Connect Yeti Rig" + icon = "link" + color = "white" + + def process(self, containers): + # Validate selection is more than 1. + message = ( + "Only 1 container selected. 2+ containers needed for this action." + ) + if len(containers) == 1: + self.display_warning(message) + return + + # Categorize containers by product type. 
+ containers_by_product_type = defaultdict(list) + repre_ids = { + container["representation"] + for container in containers + } + repre_contexts_by_id = get_repres_contexts(repre_ids) + for container in containers: + repre_id = container["representation"] + repre_context = repre_contexts_by_id[repre_id] + + product_type = repre_context["product"]["productType"] + + containers_by_product_type.setdefault(product_type, []) + containers_by_product_type[product_type].append(container) + + # Validate to only 1 source container. + source_containers = containers_by_product_type.get("animation", []) + source_containers += containers_by_product_type.get("pointcache", []) + source_container_namespaces = [ + x["namespace"] for x in source_containers + ] + message = ( + "{} animation containers selected:\n\n{}\n\nOnly select 1 of type " + "\"animation\" or \"pointcache\".".format( + len(source_containers), source_container_namespaces + ) + ) + if len(source_containers) != 1: + self.display_warning(message) + return + + source_container = source_containers[0] + source_ids = self.nodes_by_id(source_container) + + # Target containers. + target_ids = {} + inputs = [] + + yeti_rig_containers = containers_by_product_type.get("yetiRig") + if not yeti_rig_containers: + self.display_warning( + "Select at least one yetiRig container" + ) + return + + for container in yeti_rig_containers: + target_ids.update(self.nodes_by_id(container)) + repre_id = container["representation"] + + maya_file = get_representation_path( + repre_contexts_by_id[repre_id]["representation"] + ) + _, ext = os.path.splitext(maya_file) + settings_file = maya_file.replace(ext, ".rigsettings") + if not os.path.exists(settings_file): + continue + + with open(settings_file) as f: + inputs.extend(json.load(f)["inputs"]) + + # Compare loaded connections to scene. + for input in inputs: + source_node = source_ids.get(input["sourceID"]) + target_node = target_ids.get(input["destinationID"]) + + if not source_node or not target_node: + self.log.debug( + "Could not find nodes for input:\n" + + json.dumps(input, indent=4, sort_keys=True) + ) + continue + source_attr, target_attr = input["connections"] + + if not cmds.attributeQuery( + source_attr, node=source_node, exists=True + ): + self.log.debug( + "Could not find attribute {} on node {} for " + "input:\n{}".format( + source_attr, + source_node, + json.dumps(input, indent=4, sort_keys=True) + ) + ) + continue + + if not cmds.attributeQuery( + target_attr, node=target_node, exists=True + ): + self.log.debug( + "Could not find attribute {} on node {} for " + "input:\n{}".format( + target_attr, + target_node, + json.dumps(input, indent=4, sort_keys=True) + ) + ) + continue + + source_plug = "{}.{}".format( + source_node, source_attr + ) + target_plug = "{}.{}".format( + target_node, target_attr + ) + if cmds.isConnected( + source_plug, target_plug, ignoreUnitConversion=True + ): + self.log.debug( + "Connection already exists: {} -> {}".format( + source_plug, target_plug + ) + ) + continue + + cmds.connectAttr(source_plug, target_plug, force=True) + self.log.debug( + "Connected attributes: {} -> {}".format( + source_plug, target_plug + ) + ) + + def nodes_by_id(self, container): + ids = {} + for member in get_container_members(container): + id = get_id(member) + if not id: + continue + ids[id] = member + + return ids + + def display_warning(self, message, show_cancel=False): + """Show feedback to user. 
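+
+        Args:
+            message (str): Text shown in the warning dialog.
+            show_cancel (bool): Whether to offer a Cancel button
+                alongside Ok.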
+ + Returns: + bool + """ + + from qtpy import QtWidgets + + accept = QtWidgets.QMessageBox.Ok + if show_cancel: + buttons = accept | QtWidgets.QMessageBox.Cancel + else: + buttons = accept + + state = QtWidgets.QMessageBox.warning( + None, + "", + message, + buttons=buttons, + defaultButton=accept + ) + + return state == accept diff --git a/client/ayon_maya/plugins/inventory/import_modelrender.py b/client/ayon_maya/plugins/inventory/import_modelrender.py new file mode 100644 index 00000000..5e36ec6b --- /dev/null +++ b/client/ayon_maya/plugins/inventory/import_modelrender.py @@ -0,0 +1,169 @@ +import re +import json + +import ayon_api + +from ayon_core.pipeline.load import get_representation_contexts_by_ids +from ayon_core.pipeline import ( + InventoryAction, + get_current_project_name, +) +from ayon_maya.api.lib import ( + maintained_selection, + apply_shaders +) + + +class ImportModelRender(InventoryAction): + + label = "Import Model Render Sets" + icon = "industry" + color = "#55DDAA" + + scene_type_regex = "meta.render.m[ab]" + look_data_type = "meta.render.json" + + @staticmethod + def is_compatible(container): + return ( + container.get("loader") == "ReferenceLoader" + and container.get("name", "").startswith("model") + ) + + def process(self, containers): + from maya import cmds # noqa: F401 + + # --- Query entities that will be used --- + project_name = get_current_project_name() + # Collect representation ids from all containers + repre_ids = { + container["representation"] + for container in containers + } + # Create mapping of representation id to version id + # - used in containers loop + version_id_by_repre_id = { + repre_entity["id"]: repre_entity["versionId"] + for repre_entity in ayon_api.get_representations( + project_name, + representation_ids=repre_ids, + fields={"id", "versionId"} + ) + } + + # Find all representations of the versions + version_ids = set(version_id_by_repre_id.values()) + repre_entities = ayon_api.get_representations( + project_name, + version_ids=version_ids, + fields={"id", "name", "versionId"} + ) + repre_entities_by_version_id = { + version_id: [] + for version_id in version_ids + } + for repre_entity in repre_entities: + version_id = repre_entity["versionId"] + repre_entities_by_version_id[version_id].append(repre_entity) + + look_repres_by_version_id = {} + look_repre_ids = set() + for version_id, repre_entities in ( + repre_entities_by_version_id.items() + ): + json_repre = None + look_repres = [] + scene_type_regex = re.compile(self.scene_type_regex) + for repre_entity in repre_entities: + repre_name = repre_entity["name"] + if repre_name == self.look_data_type: + json_repre = repre_entity + + elif scene_type_regex.fullmatch(repre_name): + look_repres.append(repre_entity) + + look_repre = look_repres[0] if look_repres else None + if look_repre: + look_repre_ids.add(look_repre["id"]) + if json_repre: + look_repre_ids.add(json_repre["id"]) + + look_repres_by_version_id[version_id] = (json_repre, look_repre) + + contexts_by_repre_id = get_representation_contexts_by_ids( + project_name, look_repre_ids + ) + + # --- Real process logic --- + # Loop over containers and assign the looks + for container in containers: + con_name = container["objectName"] + nodes = [] + for n in cmds.sets(con_name, query=True, nodesOnly=True) or []: + if cmds.nodeType(n) == "reference": + nodes += cmds.referenceQuery(n, nodes=True) + else: + nodes.append(n) + + repre_id = container["representation"] + version_id = version_id_by_repre_id.get(repre_id) + if version_id is 
None: + print("Representation '{}' was not found".format(repre_id)) + continue + + json_repre, look_repre = look_repres_by_version_id[version_id] + + print("Importing render sets for model %r" % con_name) + self._assign_model_render( + nodes, json_repre, look_repre, contexts_by_repre_id + ) + + def _assign_model_render( + self, nodes, json_repre, look_repre, contexts_by_repre_id + ): + """Assign nodes a specific published model render data version by id. + + This assumes the nodes correspond with the asset. + + Args: + nodes (list): nodes to assign render data to + json_repre (dict[str, Any]): Representation entity of the json + file. + look_repre (dict[str, Any]): First representation entity of the + look files. + contexts_by_repre_id (dict[str, Any]): Mapping of representation + id to its context. + + Returns: + None + """ + + from maya import cmds # noqa: F401 + + # QUESTION shouldn't be json representation validated too? + if not look_repre: + print("No model render sets for this model version..") + return + + # TODO use 'get_representation_path_with_anatomy' instead + # of 'filepath_from_context' + context = contexts_by_repre_id.get(look_repre["id"]) + maya_file = self.filepath_from_context(context) + + context = contexts_by_repre_id.get(json_repre["id"]) + json_file = self.filepath_from_context(context) + + # Import the look file + with maintained_selection(): + shader_nodes = cmds.file(maya_file, + i=True, # import + returnNewNodes=True) + # imprint context data + + # Load relationships + shader_relation = json_file + with open(shader_relation, "r") as f: + relationships = json.load(f) + + # Assign relationships + apply_shaders(relationships, shader_nodes, nodes) diff --git a/client/ayon_maya/plugins/inventory/import_reference.py b/client/ayon_maya/plugins/inventory/import_reference.py new file mode 100644 index 00000000..5e42faca --- /dev/null +++ b/client/ayon_maya/plugins/inventory/import_reference.py @@ -0,0 +1,27 @@ +from maya import cmds + +from ayon_core.pipeline import InventoryAction +from ayon_maya.api.lib import get_reference_node + + +class ImportReference(InventoryAction): + """Imports selected reference to inside of the file.""" + + label = "Import Reference" + icon = "download" + color = "#d8d8d8" + + def process(self, containers): + for container in containers: + if container["loader"] != "ReferenceLoader": + print("Not a reference, skipping") + continue + + node = container["objectName"] + members = cmds.sets(node, query=True, nodesOnly=True) + ref_node = get_reference_node(members) + + ref_file = cmds.referenceQuery(ref_node, f=True) + cmds.file(ref_file, importReference=True) + + return True # return anything to trigger model refresh diff --git a/client/ayon_maya/plugins/inventory/rig_recreate_animation_instance.py b/client/ayon_maya/plugins/inventory/rig_recreate_animation_instance.py new file mode 100644 index 00000000..796a651f --- /dev/null +++ b/client/ayon_maya/plugins/inventory/rig_recreate_animation_instance.py @@ -0,0 +1,44 @@ +from ayon_core.pipeline import ( + InventoryAction, + get_current_project_name, +) +from ayon_core.pipeline.load import get_representation_contexts_by_ids +from ayon_maya.api.lib import ( + create_rig_animation_instance, + get_container_members, +) + + +class RecreateRigAnimationInstance(InventoryAction): + """Recreate animation publish instance for loaded rigs""" + + label = "Recreate rig animation instance" + icon = "wrench" + color = "#888888" + + @staticmethod + def is_compatible(container): + return ( + container.get("loader") 
== "ReferenceLoader" + and container.get("name", "").startswith("rig") + ) + + def process(self, containers): + project_name = get_current_project_name() + repre_ids = { + container["representation"] + for container in containers + } + contexts_by_repre_id = get_representation_contexts_by_ids( + project_name, repre_ids + ) + + for container in containers: + # todo: delete an existing entry if it exist or skip creation + + namespace = container["namespace"] + repre_id = container["representation"] + context = contexts_by_repre_id[repre_id] + nodes = get_container_members(container) + + create_rig_animation_instance(nodes, context, namespace) diff --git a/client/ayon_maya/plugins/inventory/select_containers.py b/client/ayon_maya/plugins/inventory/select_containers.py new file mode 100644 index 00000000..e45c8a57 --- /dev/null +++ b/client/ayon_maya/plugins/inventory/select_containers.py @@ -0,0 +1,46 @@ +from maya import cmds + +from ayon_core.pipeline import InventoryAction, registered_host +from ayon_maya.api.lib import get_container_members + + +class SelectInScene(InventoryAction): + """Select nodes in the scene from selected containers in scene inventory""" + + label = "Select in scene" + icon = "search" + color = "#888888" + order = 99 + + def process(self, containers): + + all_members = [] + for container in containers: + members = get_container_members(container) + all_members.extend(members) + cmds.select(all_members, replace=True, noExpand=True) + + +class HighlightBySceneSelection(InventoryAction): + """Select containers in scene inventory from the current scene selection""" + + label = "Highlight by scene selection" + icon = "search" + color = "#888888" + order = 100 + + def process(self, containers): + + selection = set(cmds.ls(selection=True, long=True, objectsOnly=True)) + host = registered_host() + + to_select = [] + for container in host.get_containers(): + members = get_container_members(container) + if any(member in selection for member in members): + to_select.append(container["objectName"]) + + return { + "objectNames": to_select, + "options": {"clear": True} + } diff --git a/client/ayon_maya/plugins/load/_load_animation.py b/client/ayon_maya/plugins/load/_load_animation.py new file mode 100644 index 00000000..6d4ebe25 --- /dev/null +++ b/client/ayon_maya/plugins/load/_load_animation.py @@ -0,0 +1,103 @@ +import ayon_maya.api.plugin +import maya.cmds as cmds + + +def _process_reference(file_url, name, namespace, options): + """Load files by referencing scene in Maya. 
+ + Args: + file_url (str): filepath of the objects to be loaded + name (str): product name + namespace (str): namespace + options (dict): loader option values + + Returns: + list: list of object nodes + """ + from ayon_maya.api.lib import unique_namespace + # Get name from asset being loaded + # Assuming name is product name from the animation, we split the number + # suffix from the name to ensure the namespace is unique + name = name.split("_")[0] + ext = file_url.split(".")[-1] + namespace = unique_namespace( + "{}_".format(name), + format="%03d", + suffix="_{}".format(ext) + ) + + attach_to_root = options.get("attach_to_root", True) + group_name = options["group_name"] + + # no group shall be created + if not attach_to_root: + group_name = namespace + + nodes = cmds.file(file_url, + namespace=namespace, + sharedReferenceFile=False, + groupReference=attach_to_root, + groupName=group_name, + reference=True, + returnNewNodes=True) + return nodes + + +class AbcLoader(ayon_maya.api.plugin.ReferenceLoader): + """Loader to reference an Alembic file""" + + product_types = { + "animation", + "camera", + "pointcache", + } + representations = {"abc"} + + label = "Reference animation" + order = -10 + icon = "code-fork" + color = "orange" + + def process_reference(self, context, name, namespace, options): + + cmds.loadPlugin("AbcImport.mll", quiet=True) + # hero_001 (abc) + # asset_counter{optional} + path = self.filepath_from_context(context) + file_url = self.prepare_root_value(path, + context["project"]["name"]) + + nodes = _process_reference(file_url, name, namespace, options) + # load colorbleed ID attribute + self[:] = nodes + + return nodes + + +class FbxLoader(ayon_maya.api.plugin.ReferenceLoader): + """Loader to reference FBX files""" + + product_types = { + "animation", + "camera", + } + representations = {"fbx"} + + label = "Reference animation" + order = -10 + icon = "code-fork" + color = "orange" + + def process_reference(self, context, name, namespace, options): + + cmds.loadPlugin("fbx4maya.mll", quiet=True) + + path = self.filepath_from_context(context) + file_url = self.prepare_root_value(path, + context["project"]["name"]) + + nodes = _process_reference(file_url, name, namespace, options) + + self[:] = nodes + + return nodes diff --git a/client/ayon_maya/plugins/load/actions.py b/client/ayon_maya/plugins/load/actions.py new file mode 100644 index 00000000..d28645ea --- /dev/null +++ b/client/ayon_maya/plugins/load/actions.py @@ -0,0 +1,192 @@ +"""A module containing generic loader actions that will display in the Loader.
+ +""" +import qargparse +from ayon_core.pipeline import load +from ayon_maya.api.lib import ( + maintained_selection, + get_custom_namespace +) +import ayon_maya.api.plugin + + +class SetFrameRangeLoader(load.LoaderPlugin): + """Set frame range excluding pre- and post-handles""" + + product_types = { + "animation", + "camera", + "proxyAbc", + "pointcache", + } + representations = {"abc"} + + label = "Set frame range" + order = 11 + icon = "clock-o" + color = "white" + + def load(self, context, name, namespace, data): + + import maya.cmds as cmds + + version_attributes = context["version"]["attrib"] + start = version_attributes.get("frameStart") + end = version_attributes.get("frameEnd") + + if start is None or end is None: + print("Skipping setting frame range because start or " + "end frame data is missing..") + return + + cmds.playbackOptions(minTime=start, + maxTime=end, + animationStartTime=start, + animationEndTime=end) + + +class SetFrameRangeWithHandlesLoader(load.LoaderPlugin): + """Set frame range including pre- and post-handles""" + + product_types = { + "animation", + "camera", + "proxyAbc", + "pointcache", + } + representations = {"abc"} + + label = "Set frame range (with handles)" + order = 12 + icon = "clock-o" + color = "white" + + def load(self, context, name, namespace, data): + + import maya.cmds as cmds + + version_attributes = context["version"]["attrib"] + + start = version_attributes.get("frameStart") + end = version_attributes.get("frameEnd") + + if start is None or end is None: + print("Skipping setting frame range because start or " + "end frame data is missing..") + return + + # Include handles + start -= version_attributes.get("handleStart", 0) + end += version_attributes.get("handleEnd", 0) + + cmds.playbackOptions(minTime=start, + maxTime=end, + animationStartTime=start, + animationEndTime=end) + + +class ImportMayaLoader(ayon_maya.api.plugin.Loader): + """Import action for Maya (unmanaged) + + Warning: + The loaded content will be unmanaged and is *not* visible in the + scene inventory. It's purely intended to merge content into your scene + so you could also use it as a new base. + + """ + representations = {"ma", "mb", "obj"} + product_types = { + "model", + "pointcache", + "proxyAbc", + "animation", + "mayaAscii", + "mayaScene", + "setdress", + "layout", + "camera", + "rig", + "camerarig", + "staticMesh", + "workfile", + } + + label = "Import" + order = 10 + icon = "arrow-circle-down" + color = "#775555" + + options = [ + qargparse.Boolean( + "clean_import", + label="Clean import", + default=False, + help="Should all occurrences of cbId be purged?" 
+ ) + ] + + @classmethod + def apply_settings(cls, project_settings): + super(ImportMayaLoader, cls).apply_settings(project_settings) + cls.enabled = cls.load_settings["import_loader"].get("enabled", True) + + def load(self, context, name=None, namespace=None, data=None): + import maya.cmds as cmds + + choice = self.display_warning() + if choice is False: + return + + custom_group_name, custom_namespace, options = \ + self.get_custom_namespace_and_group(context, data, + "import_loader") + + namespace = get_custom_namespace(custom_namespace) + + if not options.get("attach_to_root", True): + custom_group_name = namespace + + path = self.filepath_from_context(context) + with maintained_selection(): + nodes = cmds.file(path, + i=True, + preserveReferences=True, + namespace=namespace, + returnNewNodes=True, + groupReference=options.get("attach_to_root", + True), + groupName=custom_group_name) + + if data.get("clean_import", False): + remove_attributes = ["cbId"] + for node in nodes: + for attr in remove_attributes: + if cmds.attributeQuery(attr, node=node, exists=True): + full_attr = "{}.{}".format(node, attr) + print("Removing {}".format(full_attr)) + cmds.deleteAttr(full_attr) + + # We do not containerize imported content, it remains unmanaged + return + + def display_warning(self): + """Show warning to ensure the user can't import models by accident + + Returns: + bool + + """ + + from qtpy import QtWidgets + + accept = QtWidgets.QMessageBox.Ok + buttons = accept | QtWidgets.QMessageBox.Cancel + + message = "Are you sure you want to import this?" + state = QtWidgets.QMessageBox.warning(None, + "Are you sure?", + message, + buttons=buttons, + defaultButton=accept) + + return state == accept diff --git a/client/ayon_maya/plugins/load/load_arnold_standin.py b/client/ayon_maya/plugins/load/load_arnold_standin.py new file mode 100644 index 00000000..db81199e --- /dev/null +++ b/client/ayon_maya/plugins/load/load_arnold_standin.py @@ -0,0 +1,240 @@ +import os +import clique + +import maya.cmds as cmds + +from ayon_core.settings import get_project_settings +from ayon_core.pipeline import ( + load, + get_representation_path +) +from ayon_maya.api.lib import ( + unique_namespace, + get_attribute_input, + maintained_selection, + get_fps_for_current_context +) +from ayon_maya.api.pipeline import containerise +from ayon_maya.api.plugin import get_load_color_for_product_type + + +def is_sequence(files): + sequence = False + collections, remainder = clique.assemble(files, minimum_items=1) + if collections: + sequence = True + return sequence + + +class ArnoldStandinLoader(load.LoaderPlugin): + """Load as Arnold standin""" + + product_types = { + "ass", + "assProxy", + "animation", + "model", + "proxyAbc", + "pointcache", + "usd" + } + representations = {"ass", "abc", "usda", "usdc", "usd"} + + label = "Load as Arnold standin" + order = -5 + icon = "code-fork" + color = "orange" + + def load(self, context, name, namespace, options): + if not cmds.pluginInfo("mtoa", query=True, loaded=True): + cmds.loadPlugin("mtoa") + # Create defaultArnoldRenderOptions before creating aiStandin + # which tries to connect to it. Since we load the plugin and + # directly create aiStandin without the defaultArnoldRenderOptions, + # we need to create the render options for aiStandin creation.
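+ # (Without the options node, the aiStandIn created below would have + # nothing to connect to, hence creating the render options first.)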
+ from mtoa.core import createOptions + createOptions() + + import mtoa.ui.arnoldmenu + + version_attributes = context["version"]["attrib"] + + self.log.info("version_attributes: {}\n".format(version_attributes)) + + folder_name = context["folder"]["name"] + namespace = namespace or unique_namespace( + folder_name + "_", + prefix="_" if folder_name[0].isdigit() else "", + suffix="_", + ) + + # Root group + label = "{}:{}".format(namespace, name) + root = cmds.group(name=label, empty=True) + + # Set color. + settings = get_project_settings(context["project"]["name"]) + color = get_load_color_for_product_type("ass", settings) + if color is not None: + red, green, blue = color + cmds.setAttr(root + ".useOutlinerColor", True) + cmds.setAttr( + root + ".outlinerColor", red, green, blue + ) + + with maintained_selection(): + # Create transform with shape + transform_name = label + "_standin" + + standin_shape = mtoa.ui.arnoldmenu.createStandIn() + standin = cmds.listRelatives(standin_shape, parent=True)[0] + standin = cmds.rename(standin, transform_name) + standin_shape = cmds.listRelatives(standin, shapes=True)[0] + + cmds.parent(standin, root) + + # Set the standin filepath + repre_path = self.filepath_from_context(context) + path, operator = self._setup_proxy( + standin_shape, repre_path, namespace + ) + cmds.setAttr(standin_shape + ".dso", path, type="string") + sequence = is_sequence(os.listdir(os.path.dirname(repre_path))) + cmds.setAttr(standin_shape + ".useFrameExtension", sequence) + + fps = ( + version_attributes.get("fps") or get_fps_for_current_context() + ) + cmds.setAttr(standin_shape + ".abcFPS", float(fps)) + + nodes = [root, standin, standin_shape] + if operator is not None: + nodes.append(operator) + self[:] = nodes + + return containerise( + name=name, + namespace=namespace, + nodes=nodes, + context=context, + loader=self.__class__.__name__) + + def get_next_free_multi_index(self, attr_name): + """Find the next unconnected multi index at the input attribute.""" + for index in range(10000000): + connection_info = cmds.connectionInfo( + "{}[{}]".format(attr_name, index), + sourceFromDestination=True + ) + if len(connection_info or []) == 0: + return index + + def _get_proxy_path(self, path): + basename_split = os.path.basename(path).split(".") + proxy_basename = ( + basename_split[0] + "_proxy." 
+ ".".join(basename_split[1:]) + ) + proxy_path = "/".join([os.path.dirname(path), proxy_basename]) + return proxy_basename, proxy_path + + def _update_operators(self, string_replace_operator, proxy_basename, path): + cmds.setAttr( + string_replace_operator + ".match", + proxy_basename.split(".")[0], + type="string" + ) + cmds.setAttr( + string_replace_operator + ".replace", + os.path.basename(path).split(".")[0], + type="string" + ) + + def _setup_proxy(self, shape, path, namespace): + proxy_basename, proxy_path = self._get_proxy_path(path) + + options_node = "defaultArnoldRenderOptions" + merge_operator = get_attribute_input(options_node + ".operator") + if merge_operator is None: + merge_operator = cmds.createNode("aiMerge") + cmds.connectAttr( + merge_operator + ".message", options_node + ".operator" + ) + + merge_operator = merge_operator.split(".")[0] + + string_replace_operator = cmds.createNode( + "aiStringReplace", name=namespace + ":string_replace_operator" + ) + node_type = "alembic" if path.endswith(".abc") else "procedural" + cmds.setAttr( + string_replace_operator + ".selection", + "*.(@node=='{}')".format(node_type), + type="string" + ) + self._update_operators(string_replace_operator, proxy_basename, path) + + cmds.connectAttr( + string_replace_operator + ".out", + "{}.inputs[{}]".format( + merge_operator, + self.get_next_free_multi_index(merge_operator + ".inputs") + ) + ) + + # We setup the string operator no matter whether there is a proxy or + # not. This makes it easier to update since the string operator will + # always be created. Return original path to use for standin. + if not os.path.exists(proxy_path): + return path, string_replace_operator + + return proxy_path, string_replace_operator + + def update(self, container, context): + # Update the standin + members = cmds.sets(container['objectName'], query=True) + for member in members: + if cmds.nodeType(member) == "aiStringReplace": + string_replace_operator = member + + shapes = cmds.listRelatives(member, shapes=True) + if not shapes: + continue + if cmds.nodeType(shapes[0]) == "aiStandIn": + standin = shapes[0] + + repre_entity = context["representation"] + path = get_representation_path(repre_entity) + proxy_basename, proxy_path = self._get_proxy_path(path) + + # Whether there is proxy or not, we still update the string operator. + # If no proxy exists, the string operator won't replace anything. 
+ self._update_operators(string_replace_operator, proxy_basename, path) + + dso_path = path + if os.path.exists(proxy_path): + dso_path = proxy_path + cmds.setAttr(standin + ".dso", dso_path, type="string") + + sequence = is_sequence(os.listdir(os.path.dirname(path))) + cmds.setAttr(standin + ".useFrameExtension", sequence) + + cmds.setAttr( + container["objectName"] + ".representation", + repre_entity["id"], + type="string" + ) + + def switch(self, container, context): + self.update(container, context) + + def remove(self, container): + members = cmds.sets(container['objectName'], query=True) + cmds.lockNode(members, lock=False) + cmds.delete([container['objectName']] + members) + + # Clean up the namespace + try: + cmds.namespace(removeNamespace=container['namespace'], + deleteNamespaceContent=True) + except RuntimeError: + pass diff --git a/client/ayon_maya/plugins/load/load_as_template.py b/client/ayon_maya/plugins/load/load_as_template.py new file mode 100644 index 00000000..2a29faa7 --- /dev/null +++ b/client/ayon_maya/plugins/load/load_as_template.py @@ -0,0 +1,39 @@ +from ayon_core.lib import ( + BoolDef +) +from ayon_core.pipeline import ( + load, + registered_host +) +from ayon_maya.api.workfile_template_builder import ( + MayaTemplateBuilder +) + + +class LoadAsTemplate(load.LoaderPlugin): + """Load workfile as a template """ + + product_types = {"workfile", "mayaScene"} + label = "Load as template" + representations = ["ma", "mb"] + icon = "wrench" + color = "#775555" + order = 10 + + options = [ + BoolDef("keep_placeholders", + label="Keep Placeholders", + default=False), + BoolDef("create_first_version", + label="Create First Version", + default=False), + ] + + def load(self, context, name, namespace, data): + keep_placeholders = data.get("keep_placeholders", False) + create_first_version = data.get("create_first_version", False) + path = self.filepath_from_context(context) + builder = MayaTemplateBuilder(registered_host()) + builder.build_template(template_path=path, + keep_placeholders=keep_placeholders, + create_first_version=create_first_version) diff --git a/client/ayon_maya/plugins/load/load_assembly.py b/client/ayon_maya/plugins/load/load_assembly.py new file mode 100644 index 00000000..b37bf141 --- /dev/null +++ b/client/ayon_maya/plugins/load/load_assembly.py @@ -0,0 +1,79 @@ +import maya.cmds as cmds + +from ayon_core.pipeline import ( + load, + remove_container +) + +from ayon_maya.api.pipeline import containerise +from ayon_maya.api.lib import unique_namespace +from ayon_maya.api import setdress + + +class AssemblyLoader(load.LoaderPlugin): + + product_types = {"assembly"} + representations = {"json"} + + label = "Load Set Dress" + order = -9 + icon = "code-fork" + color = "orange" + + def load(self, context, name, namespace, data): + folder_name = context["folder"]["name"] + namespace = namespace or unique_namespace( + folder_name + "_", + prefix="_" if folder_name[0].isdigit() else "", + suffix="_", + ) + + containers = setdress.load_package( + filepath=self.filepath_from_context(context), + name=name, + namespace=namespace + ) + + self[:] = containers + + # Only containerize if any nodes were loaded by the Loader + nodes = self[:] + if not nodes: + return + + return containerise( + name=name, + namespace=namespace, + nodes=nodes, + context=context, + loader=self.__class__.__name__) + + def update(self, container, context): + + return setdress.update_package(container, context) + + def remove(self, container): + """Remove all sub containers""" + + # Remove all 
members + member_containers = setdress.get_contained_containers(container) + for member_container in member_containers: + self.log.info("Removing container %s", + member_container['objectName']) + remove_container(member_container) + + # Remove alembic hierarchy reference + # TODO: Check whether removing all contained references is safe enough + members = cmds.sets(container['objectName'], query=True) or [] + references = cmds.ls(members, type="reference") + for reference in references: + self.log.info("Removing %s", reference) + fname = cmds.referenceQuery(reference, filename=True) + cmds.file(fname, removeReference=True) + + # Delete container and its contents + if cmds.objExists(container['objectName']): + members = cmds.sets(container['objectName'], query=True) or [] + cmds.delete([container['objectName']] + members) + + # TODO: Ensure namespace is gone diff --git a/client/ayon_maya/plugins/load/load_audio.py b/client/ayon_maya/plugins/load/load_audio.py new file mode 100644 index 00000000..81e49c61 --- /dev/null +++ b/client/ayon_maya/plugins/load/load_audio.py @@ -0,0 +1,114 @@ +from maya import cmds, mel + +from ayon_core.pipeline import ( + load, + get_representation_path, +) +from ayon_maya.api.pipeline import containerise +from ayon_maya.api.lib import unique_namespace, get_container_members + + +class AudioLoader(load.LoaderPlugin): + """Specific loader of audio.""" + + product_types = {"audio"} + label = "Load audio" + representations = {"wav"} + icon = "volume-up" + color = "orange" + + def load(self, context, name, namespace, data): + + start_frame = cmds.playbackOptions(query=True, min=True) + sound_node = cmds.sound( + file=self.filepath_from_context(context), offset=start_frame + ) + cmds.timeControl( + mel.eval("$gPlayBackSlider=$gPlayBackSlider"), + edit=True, + sound=sound_node, + displaySound=True + ) + + folder_name = context["folder"]["name"] + namespace = namespace or unique_namespace( + folder_name + "_", + prefix="_" if folder_name[0].isdigit() else "", + suffix="_", + ) + + return containerise( + name=name, + namespace=namespace, + nodes=[sound_node], + context=context, + loader=self.__class__.__name__ + ) + + def update(self, container, context): + repre_entity = context["representation"] + + members = get_container_members(container) + audio_nodes = cmds.ls(members, type="audio") + + assert audio_nodes, "Audio node not found." + audio_node = audio_nodes[0] + + current_sound = cmds.timeControl( + mel.eval("$gPlayBackSlider=$gPlayBackSlider"), + query=True, + sound=True + ) + activate_sound = current_sound == audio_node + + path = get_representation_path(repre_entity) + + cmds.sound( + audio_node, + edit=True, + file=path + ) + + # The source start + end do not automatically update to the + # length of the new audio file, even though maya does do that when + # creating a new audio node. So to update we compute it manually. + # This would however override any source start and source end a user + # might have set on the original audio node after load.
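+ # For example: 120000 sample frames at 48000 Hz is 2.5 seconds of + # audio, which at a 24 fps workfile gives sourceEnd = 60 frames + # (illustrative numbers).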
+ audio_frame_count = cmds.getAttr("{}.frameCount".format(audio_node)) + audio_sample_rate = cmds.getAttr("{}.sampleRate".format(audio_node)) + duration_in_seconds = audio_frame_count / audio_sample_rate + fps = mel.eval('currentTimeUnitToFPS()') # workfile FPS + source_start = 0 + source_end = (duration_in_seconds * fps) + cmds.setAttr("{}.sourceStart".format(audio_node), source_start) + cmds.setAttr("{}.sourceEnd".format(audio_node), source_end) + + if activate_sound: + # maya by default deactivates it from timeline on file change + cmds.timeControl( + mel.eval("$gPlayBackSlider=$gPlayBackSlider"), + edit=True, + sound=audio_node, + displaySound=True + ) + + cmds.setAttr( + container["objectName"] + ".representation", + repre_entity["id"], + type="string" + ) + + def switch(self, container, context): + self.update(container, context) + + def remove(self, container): + members = cmds.sets(container['objectName'], query=True) + cmds.lockNode(members, lock=False) + cmds.delete([container['objectName']] + members) + + # Clean up the namespace + try: + cmds.namespace(removeNamespace=container['namespace'], + deleteNamespaceContent=True) + except RuntimeError: + pass diff --git a/client/ayon_maya/plugins/load/load_gpucache.py b/client/ayon_maya/plugins/load/load_gpucache.py new file mode 100644 index 00000000..fb6db94c --- /dev/null +++ b/client/ayon_maya/plugins/load/load_gpucache.py @@ -0,0 +1,104 @@ +import maya.cmds as cmds + +from ayon_maya.api.pipeline import containerise +from ayon_maya.api.lib import unique_namespace +from ayon_core.pipeline import ( + load, + get_representation_path +) +from ayon_core.settings import get_project_settings +from ayon_maya.api.plugin import get_load_color_for_product_type + + +class GpuCacheLoader(load.LoaderPlugin): + """Load Alembic as gpuCache""" + + product_types = {"model", "animation", "proxyAbc", "pointcache"} + representations = {"abc", "gpu_cache"} + + label = "Load Gpu Cache" + order = -5 + icon = "code-fork" + color = "orange" + + def load(self, context, name, namespace, data): + folder_name = context["folder"]["name"] + namespace = namespace or unique_namespace( + folder_name + "_", + prefix="_" if folder_name[0].isdigit() else "", + suffix="_", + ) + + cmds.loadPlugin("gpuCache", quiet=True) + + # Root group + label = "{}:{}".format(namespace, name) + root = cmds.group(name=label, empty=True) + + project_name = context["project"]["name"] + settings = get_project_settings(project_name) + color = get_load_color_for_product_type("model", settings) + if color is not None: + red, green, blue = color + cmds.setAttr(root + ".useOutlinerColor", 1) + cmds.setAttr( + root + ".outlinerColor", red, green, blue + ) + + # Create transform with shape + transform_name = label + "_GPU" + transform = cmds.createNode("transform", name=transform_name, + parent=root) + cache = cmds.createNode("gpuCache", + parent=transform, + name="{0}Shape".format(transform_name)) + + # Set the cache filepath + path = self.filepath_from_context(context) + cmds.setAttr(cache + '.cacheFileName', path, type="string") + cmds.setAttr(cache + '.cacheGeomPath', "|", type="string") # root + + # Lock parenting of the transform and cache + cmds.lockNode([transform, cache], lock=True) + + nodes = [root, transform, cache] + self[:] = nodes + + return containerise( + name=name, + namespace=namespace, + nodes=nodes, + context=context, + loader=self.__class__.__name__) + + def update(self, container, context): + repre_entity = context["representation"] + path = 
get_representation_path(repre_entity) + + # Update the cache + members = cmds.sets(container['objectName'], query=True) + caches = cmds.ls(members, type="gpuCache", long=True) + + assert len(caches) == 1, "This is a bug" + + for cache in caches: + cmds.setAttr(cache + ".cacheFileName", path, type="string") + + cmds.setAttr(container["objectName"] + ".representation", + repre_entity["id"], + type="string") + + def switch(self, container, context): + self.update(container, context) + + def remove(self, container): + members = cmds.sets(container['objectName'], query=True) + cmds.lockNode(members, lock=False) + cmds.delete([container['objectName']] + members) + + # Clean up the namespace + try: + cmds.namespace(removeNamespace=container['namespace'], + deleteNamespaceContent=True) + except RuntimeError: + pass diff --git a/client/ayon_maya/plugins/load/load_image.py b/client/ayon_maya/plugins/load/load_image.py new file mode 100644 index 00000000..28b44ac9 --- /dev/null +++ b/client/ayon_maya/plugins/load/load_image.py @@ -0,0 +1,337 @@ +import copy + +from ayon_core.lib import EnumDef +from ayon_core.pipeline import ( + load, + get_current_host_name, +) +from ayon_core.pipeline.load.utils import get_representation_path_from_context +from ayon_core.pipeline.colorspace import ( + get_imageio_file_rules_colorspace_from_filepath, + get_current_context_imageio_config_preset, + get_imageio_file_rules +) +from ayon_core.settings import get_project_settings + +from ayon_maya.api.pipeline import containerise +from ayon_maya.api.lib import ( + unique_namespace, + namespaced +) + +from maya import cmds + + +def create_texture(): + """Create place2dTexture with file node with uv connections + + Mimics Maya "file [Texture]" creation. + """ + + place = cmds.shadingNode("place2dTexture", asUtility=True, name="place2d") + file = cmds.shadingNode("file", asTexture=True, name="file") + + connections = ["coverage", "translateFrame", "rotateFrame", "rotateUV", + "mirrorU", "mirrorV", "stagger", "wrapV", "wrapU", + "repeatUV", "offset", "noiseUV", "vertexUvThree", + "vertexUvTwo", "vertexUvOne", "vertexCameraOne"] + for attr in connections: + src = "{}.{}".format(place, attr) + dest = "{}.{}".format(file, attr) + cmds.connectAttr(src, dest) + + cmds.connectAttr(place + '.outUV', file + '.uvCoord') + cmds.connectAttr(place + '.outUvFilterSize', file + '.uvFilterSize') + + return file, place + + +def create_projection(): + """Create texture with place3dTexture and projection + + Mimics Maya "file [Projection]" creation. + """ + + file, place = create_texture() + projection = cmds.shadingNode("projection", asTexture=True, + name="projection") + place3d = cmds.shadingNode("place3dTexture", asUtility=True, + name="place3d") + + cmds.connectAttr(place3d + '.worldInverseMatrix[0]', + projection + ".placementMatrix") + cmds.connectAttr(file + '.outColor', projection + ".image") + + return file, place, projection, place3d + + +def create_stencil(): + """Create texture with extra place2dTexture offset and stencil + + Mimics Maya "file [Stencil]" creation. 
+ """ + + file, place = create_texture() + + place_stencil = cmds.shadingNode("place2dTexture", asUtility=True, + name="place2d_stencil") + stencil = cmds.shadingNode("stencil", asTexture=True, name="stencil") + + for src_attr, dest_attr in [ + ("outUV", "uvCoord"), + ("outUvFilterSize", "uvFilterSize") + ]: + src_plug = "{}.{}".format(place_stencil, src_attr) + cmds.connectAttr(src_plug, "{}.{}".format(place, dest_attr)) + cmds.connectAttr(src_plug, "{}.{}".format(stencil, dest_attr)) + + return file, place, stencil, place_stencil + + +class FileNodeLoader(load.LoaderPlugin): + """File node loader.""" + + product_types = {"image", "plate", "render"} + label = "Load file node" + representations = {"exr", "tif", "png", "jpg"} + icon = "image" + color = "orange" + order = 2 + + options = [ + EnumDef( + "mode", + items={ + "texture": "Texture", + "projection": "Projection", + "stencil": "Stencil" + }, + default="texture", + label="Texture Mode" + ) + ] + + def load(self, context, name, namespace, data): + folder_name = context["folder"]["name"] + namespace = namespace or unique_namespace( + folder_name + "_", + prefix="_" if folder_name[0].isdigit() else "", + suffix="_", + ) + + with namespaced(namespace, new=True) as namespace: + # Create the nodes within the namespace + nodes = { + "texture": create_texture, + "projection": create_projection, + "stencil": create_stencil + }[data.get("mode", "texture")]() + + file_node = cmds.ls(nodes, type="file")[0] + + self._apply_representation_context(context, file_node) + + # For ease of access for the user select all the nodes and select + # the file node last so that UI shows its attributes by default + cmds.select(list(nodes) + [file_node], replace=True) + + return containerise( + name=name, + namespace=namespace, + nodes=nodes, + context=context, + loader=self.__class__.__name__ + ) + + def update(self, container, context): + repre_entity = context["representation"] + + members = cmds.sets(container['objectName'], query=True) + file_node = cmds.ls(members, type="file")[0] + + self._apply_representation_context(context, file_node) + + # Update representation + cmds.setAttr( + container["objectName"] + ".representation", + repre_entity["id"], + type="string" + ) + + def switch(self, container, context): + self.update(container, context) + + def remove(self, container): + members = cmds.sets(container['objectName'], query=True) + cmds.lockNode(members, lock=False) + cmds.delete([container['objectName']] + members) + + # Clean up the namespace + try: + cmds.namespace(removeNamespace=container['namespace'], + deleteNamespaceContent=True) + except RuntimeError: + pass + + def _apply_representation_context(self, context, file_node): + """Update the file node to match the context. + + This sets the file node's attributes for: + - file path + - udim tiling mode (if it is an udim tile) + - use frame extension (if it is a sequence) + - colorspace + + """ + + repre_context = context["representation"]["context"] + has_frames = repre_context.get("frame") is not None + has_udim = repre_context.get("udim") is not None + + # Set UV tiling mode if UDIM tiles + if has_udim: + cmds.setAttr(file_node + ".uvTilingMode", 3) # UDIM-tiles + else: + cmds.setAttr(file_node + ".uvTilingMode", 0) # off + + # Enable sequence if publish has `startFrame` and `endFrame` and + # `startFrame != endFrame` + if has_frames and self._is_sequence(context): + # When enabling useFrameExtension maya automatically + # connects an expression to .frameExtension to set + # the current frame. 
However, this expression is generated + # with some delay and thus it'll show a warning if frame 0 + # doesn't exist because we're explicitly setting the + # token. + cmds.setAttr(file_node + ".useFrameExtension", True) + else: + cmds.setAttr(file_node + ".useFrameExtension", False) + + # Set the file node path attribute + path = self._format_path(context) + cmds.setAttr(file_node + ".fileTextureName", path, type="string") + + # Set colorspace + colorspace = self._get_colorspace(context) + if colorspace: + cmds.setAttr(file_node + ".colorSpace", colorspace, type="string") + else: + self.log.debug("Unknown colorspace - setting colorspace skipped.") + + def _is_sequence(self, context): + """Check whether frameStart and frameEnd are not the same.""" + version = context["version"] + representation = context["representation"] + + # TODO this is invalid logic, it should be based only on + # representation entity + for entity in [representation, version]: + # Frame range can be set on version or representation. + # When set on representation it overrides version data. + attributes = entity["attrib"] + data = entity["data"] + start = data.get("frameStartHandle", attributes.get("frameStart")) + end = data.get("frameEndHandle", attributes.get("frameEnd")) + + if start is None or end is None: + continue + + if start != end: + return True + else: + return False + + return False + + def _get_colorspace(self, context): + """Return colorspace of the file to load. + + Retrieves the explicit colorspace from the publish. If no colorspace + data is stored with published content then project imageio settings + are used to make an assumption of the colorspace based on the file + rules. If no file rules match then None is returned. + + Returns: + str or None: The colorspace of the file or None if not detected. 
+ + """ + + # We can't apply color spaces if management is not enabled + if not cmds.colorManagementPrefs(query=True, cmEnabled=True): + return + + representation = context["representation"] + colorspace_data = representation.get("data", {}).get("colorspaceData") + if colorspace_data: + return colorspace_data["colorspace"] + + # Assume colorspace from filepath based on project settings + project_name = context["project"]["name"] + host_name = get_current_host_name() + project_settings = get_project_settings(project_name) + + config_data = get_current_context_imageio_config_preset( + project_settings=project_settings + ) + + # ignore if host imageio is not enabled + if not config_data: + return + + file_rules = get_imageio_file_rules( + project_name, host_name, + project_settings=project_settings + ) + + path = get_representation_path_from_context(context) + colorspace = get_imageio_file_rules_colorspace_from_filepath( + path, + host_name, + project_name, + config_data=config_data, + file_rules=file_rules, + project_settings=project_settings + ) + + return colorspace + + def _format_path(self, context): + """Format the path with correct tokens for frames and udim tiles.""" + + context = copy.deepcopy(context) + representation = context["representation"] + template = representation.get("attrib", {}).get("template") + if not template: + # No template to find token locations for + return get_representation_path_from_context(context) + + def _placeholder(key): + # Substitute with a long placeholder value so that potential + # custom formatting with padding doesn't find its way into + # our formatting, so that wouldn't be padded as 0 + return "___{}___".format(key) + + # We format UDIM and Frame numbers with their specific tokens. To do so + # we in-place change the representation context data to format the path + # with our own data + tokens = { + "frame": "", + "udim": "" + } + has_tokens = False + repre_context = representation["context"] + for key, _token in tokens.items(): + if key in repre_context: + repre_context[key] = _placeholder(key) + has_tokens = True + + # Replace with our custom template that has the tokens set + representation["attrib"]["template"] = template + path = get_representation_path_from_context(context) + + if has_tokens: + for key, token in tokens.items(): + if key in repre_context: + path = path.replace(_placeholder(key), token) + + return path diff --git a/client/ayon_maya/plugins/load/load_image_plane.py b/client/ayon_maya/plugins/load/load_image_plane.py new file mode 100644 index 00000000..2740f106 --- /dev/null +++ b/client/ayon_maya/plugins/load/load_image_plane.py @@ -0,0 +1,275 @@ +from qtpy import QtWidgets, QtCore + +from ayon_core.pipeline import ( + load, + get_representation_path, +) +from ayon_maya.api.pipeline import containerise +from ayon_maya.api.lib import ( + unique_namespace, + namespaced, + pairwise, + get_container_members +) + +from maya import cmds + + +def disconnect_inputs(plug): + overrides = cmds.listConnections(plug, + source=True, + destination=False, + plugs=True, + connections=True) or [] + for dest, src in pairwise(overrides): + cmds.disconnectAttr(src, dest) + + +class CameraWindow(QtWidgets.QDialog): + + def __init__(self, cameras): + super(CameraWindow, self).__init__() + self.setWindowFlags(self.windowFlags() | QtCore.Qt.FramelessWindowHint) + + self.camera = None + + self.widgets = { + "label": QtWidgets.QLabel("Select camera for image plane."), + "list": QtWidgets.QListWidget(), + "staticImagePlane": QtWidgets.QCheckBox(), + 
"showInAllViews": QtWidgets.QCheckBox(), + "warning": QtWidgets.QLabel("No cameras selected!"), + "buttons": QtWidgets.QWidget(), + "okButton": QtWidgets.QPushButton("Ok"), + "cancelButton": QtWidgets.QPushButton("Cancel") + } + + # Build warning. + self.widgets["warning"].setVisible(False) + self.widgets["warning"].setStyleSheet("color: red") + + # Build list. + for camera in cameras: + self.widgets["list"].addItem(camera) + + + # Build buttons. + layout = QtWidgets.QHBoxLayout(self.widgets["buttons"]) + layout.addWidget(self.widgets["okButton"]) + layout.addWidget(self.widgets["cancelButton"]) + + # Build layout. + layout = QtWidgets.QVBoxLayout(self) + layout.addWidget(self.widgets["label"]) + layout.addWidget(self.widgets["list"]) + layout.addWidget(self.widgets["buttons"]) + layout.addWidget(self.widgets["warning"]) + + self.widgets["okButton"].pressed.connect(self.on_ok_pressed) + self.widgets["cancelButton"].pressed.connect(self.on_cancel_pressed) + self.widgets["list"].itemPressed.connect(self.on_list_itemPressed) + + def on_list_itemPressed(self, item): + self.camera = item.text() + + def on_ok_pressed(self): + if self.camera is None: + self.widgets["warning"].setVisible(True) + return + + self.close() + + def on_cancel_pressed(self): + self.camera = None + self.close() + + +class ImagePlaneLoader(load.LoaderPlugin): + """Specific loader of plate for image planes on selected camera.""" + + product_types = {"image", "plate", "render"} + label = "Load imagePlane" + representations = {"mov", "exr", "preview", "png", "jpg"} + icon = "image" + color = "orange" + + def load(self, context, name, namespace, data, options=None): + + image_plane_depth = 1000 + folder_name = context["folder"]["name"] + namespace = namespace or unique_namespace( + folder_name + "_", + prefix="_" if folder_name[0].isdigit() else "", + suffix="_", + ) + + # Get camera from user selection. 
+ # is_static_image_plane = None + # is_in_all_views = None + camera = data.get("camera") if data else None + + if not camera: + cameras = cmds.ls(type="camera") + + # Cameras by names + camera_names = {} + for camera in cameras: + parent = cmds.listRelatives(camera, parent=True, path=True)[0] + camera_names[parent] = camera + + camera_names["Create new camera."] = "create-camera" + window = CameraWindow(camera_names.keys()) + window.exec_() + # Skip if no camera was selected (Dialog was closed) + if window.camera not in camera_names: + return + camera = camera_names[window.camera] + + if camera == "create-camera": + camera = cmds.createNode("camera") + + if camera is None: + return + + try: + cmds.setAttr("{}.displayResolution".format(camera), True) + cmds.setAttr("{}.farClipPlane".format(camera), + image_plane_depth * 10) + except RuntimeError: + pass + + # Create image plane + with namespaced(namespace): + # Create inside the namespace + image_plane_transform, image_plane_shape = cmds.imagePlane( + fileName=self.filepath_from_context(context), + camera=camera + ) + + # Set colorspace + colorspace = self.get_colorspace(context["representation"]) + if colorspace: + cmds.setAttr( + "{}.ignoreColorSpaceFileRules".format(image_plane_shape), + True + ) + cmds.setAttr("{}.colorSpace".format(image_plane_shape), + colorspace, type="string") + + # Set offset frame range + start_frame = cmds.playbackOptions(query=True, min=True) + end_frame = cmds.playbackOptions(query=True, max=True) + + for attr, value in { + "depth": image_plane_depth, + "frameOffset": 0, + "frameIn": start_frame, + "frameOut": end_frame, + "frameCache": end_frame, + "useFrameExtension": True + }.items(): + plug = "{}.{}".format(image_plane_shape, attr) + cmds.setAttr(plug, value) + + movie_representations = {"mov", "preview"} + if context["representation"]["name"] in movie_representations: + cmds.setAttr(image_plane_shape + ".type", 2) + + # Ask user whether to use sequence or still image. + if context["representation"]["name"] == "exr": + # Ensure OpenEXRLoader plugin is loaded. + cmds.loadPlugin("OpenEXRLoader", quiet=True) + + message = ( + "Hold image sequence on first frame?" + "\n{} files available.".format( + len(context["representation"]["files"]) + ) + ) + reply = QtWidgets.QMessageBox.information( + None, + "Frame Hold.", + message, + QtWidgets.QMessageBox.Yes, + QtWidgets.QMessageBox.No + ) + if reply == QtWidgets.QMessageBox.Yes: + frame_extension_plug = "{}.frameExtension".format(image_plane_shape) # noqa + + # Remove current frame expression + disconnect_inputs(frame_extension_plug) + + cmds.setAttr(frame_extension_plug, start_frame) + + new_nodes = [image_plane_transform, image_plane_shape] + + return containerise( + name=name, + namespace=namespace, + nodes=new_nodes, + context=context, + loader=self.__class__.__name__ + ) + + def update(self, container, context): + folder_entity = context["folder"] + repre_entity = context["representation"] + + members = get_container_members(container) + image_planes = cmds.ls(members, type="imagePlane") + assert image_planes, "Image plane not found." 
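+ # This loader creates a single image plane per container, so + # updating the first matching shape is sufficient.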
+ image_plane_shape = image_planes[0] + + path = get_representation_path(repre_entity) + cmds.setAttr("{}.imageName".format(image_plane_shape), + path, + type="string") + cmds.setAttr("{}.representation".format(container["objectName"]), + repre_entity["id"], + type="string") + + colorspace = self.get_colorspace(repre_entity) + if colorspace: + cmds.setAttr( + "{}.ignoreColorSpaceFileRules".format(image_plane_shape), + True + ) + cmds.setAttr("{}.colorSpace".format(image_plane_shape), + colorspace, type="string") + + # Set frame range. + start_frame = folder_entity["attrib"]["frameStart"] + end_frame = folder_entity["attrib"]["frameEnd"] + + for attr, value in { + "frameOffset": 0, + "frameIn": start_frame, + "frameOut": end_frame, + "frameCache": end_frame + }.items(): + plug = "{}.{}".format(image_plane_shape, attr) + cmds.setAttr(plug, value) + + def switch(self, container, context): + self.update(container, context) + + def remove(self, container): + members = cmds.sets(container['objectName'], query=True) + cmds.lockNode(members, lock=False) + cmds.delete([container['objectName']] + members) + + # Clean up the namespace + try: + cmds.namespace(removeNamespace=container['namespace'], + deleteNamespaceContent=True) + except RuntimeError: + pass + + def get_colorspace(self, representation): + + data = representation.get("data", {}).get("colorspaceData", {}) + if not data: + return + + colorspace = data.get("colorspace") + return colorspace diff --git a/client/ayon_maya/plugins/load/load_look.py b/client/ayon_maya/plugins/load/load_look.py new file mode 100644 index 00000000..f61d0e9c --- /dev/null +++ b/client/ayon_maya/plugins/load/load_look.py @@ -0,0 +1,140 @@ +# -*- coding: utf-8 -*- +"""Look loader.""" +import json +from collections import defaultdict + +from qtpy import QtWidgets +from ayon_api import get_representation_by_name + +from ayon_core.pipeline import get_representation_path +import ayon_maya.api.plugin +from ayon_maya.api import lib +from ayon_maya.api.lib import get_reference_node + +from ayon_core.tools.utils import ScrollMessageBox + + +class LookLoader(ayon_maya.api.plugin.ReferenceLoader): + """Specific loader for lookdev""" + + product_types = {"look"} + representations = {"ma"} + + label = "Reference look" + order = -10 + icon = "code-fork" + color = "orange" + + def process_reference(self, context, name, namespace, options): + from maya import cmds + + with lib.maintained_selection(): + file_url = self.prepare_root_value( + file_url=self.filepath_from_context(context), + project_name=context["project"]["name"] + ) + nodes = cmds.file(file_url, + namespace=namespace, + reference=True, + returnNewNodes=True) + + self[:] = nodes + + def switch(self, container, context): + self.update(container, context) + + def update(self, container, context): + """ + Called by Scene Inventory when look should be updated to current + version. + If any reference edits cannot be applied, e.g. shader renamed and + material not present, reference is unloaded and cleaned. + All failed edits are highlighted to the user via message box.
+ + Args: + container (dict): container with the look to be updated + context (dict): relationship data to get the proper + representation from the DB and the persisted + data in the .json file + Returns: + None + """ + from maya import cmds + + # Get reference node from container members + members = lib.get_container_members(container) + reference_node = get_reference_node(members, log=self.log) + + shader_nodes = cmds.ls(members, type='shadingEngine') + orig_nodes = set(self._get_nodes_with_shader(shader_nodes)) + + # Trigger the regular reference update on the ReferenceLoader + super(LookLoader, self).update(container, context) + + # get new applied shaders and nodes from new version + shader_nodes = cmds.ls(members, type='shadingEngine') + nodes = set(self._get_nodes_with_shader(shader_nodes)) + + version_id = context["version"]["id"] + project_name = context["project"]["name"] + json_representation = get_representation_by_name( + project_name, "json", version_id + ) + + # Load relationships + shader_relation = get_representation_path(json_representation) + with open(shader_relation, "r") as f: + json_data = json.load(f) + + # update of reference could result in failed edits - material is not + # present because of renaming etc. If so, highlight failed edits to + # the user + failed_edits = cmds.referenceQuery(reference_node, + editStrings=True, + failedEdits=True, + successfulEdits=False) + if failed_edits: + # clean references - removes failed reference edits + cmds.file(cr=reference_node)  # cleanReference + + # reapply shading groups from json representation on orig nodes + lib.apply_shaders(json_data, shader_nodes, orig_nodes) + + msg = ["During reference update some edits failed.", + "All successful edits were kept intact.\n", + "Failed and removed edits:"] + msg.extend(failed_edits) + + msg = ScrollMessageBox(QtWidgets.QMessageBox.Warning, + "Some reference edits failed", + msg) + msg.exec_() + + attributes = json_data.get("attributes", []) + + # region compute lookup + nodes_by_id = defaultdict(list) + for node in nodes: + nodes_by_id[lib.get_id(node)].append(node) + lib.apply_attributes(attributes, nodes_by_id) + + def _get_nodes_with_shader(self, shader_nodes): + """ + Returns the list of nodes belonging to the given shaders + Args: + shader_nodes (list): shading engine nodes + Returns: + node names + """ + from maya import cmds + + for shader in shader_nodes: + future = cmds.listHistory(shader, future=True) + connections = cmds.listConnections(future, + type='mesh') + if connections: + # Ensure unique entries only to optimize query and results + connections = list(set(connections)) + return cmds.listRelatives(connections, + shapes=True, + fullPath=True) or [] + return [] diff --git a/client/ayon_maya/plugins/load/load_matchmove.py b/client/ayon_maya/plugins/load/load_matchmove.py new file mode 100644 index 00000000..b19b14b1 --- /dev/null +++ b/client/ayon_maya/plugins/load/load_matchmove.py @@ -0,0 +1,30 @@ +from maya import mel +from ayon_core.pipeline import load + + +class MatchmoveLoader(load.LoaderPlugin): + """ + This will run a matchmove script to create a track in the scene.
+ + Supported script types are .py and .mel + """ + + product_types = {"matchmove"} + representations = {"py", "mel"} + defaults = ["Camera", "Object", "Mocap"] + + label = "Run matchmove script" + icon = "empire" + color = "orange" + + def load(self, context, name, namespace, data): + path = self.filepath_from_context(context) + if path.lower().endswith(".py"): + with open(path) as f: + exec(f.read()) + + elif path.lower().endswith(".mel"): + mel.eval('source "{}"'.format(path)) + + else: + self.log.error("Unsupported script type: {}".format(path)) + + return True diff --git a/client/ayon_maya/plugins/load/load_maya_usd.py b/client/ayon_maya/plugins/load/load_maya_usd.py new file mode 100644 index 00000000..6c2945f4 --- /dev/null +++ b/client/ayon_maya/plugins/load/load_maya_usd.py @@ -0,0 +1,109 @@ +# -*- coding: utf-8 -*- +import maya.cmds as cmds + +from ayon_core.pipeline import ( + load, + get_representation_path, +) +from ayon_core.pipeline.load import get_representation_path_from_context +from ayon_maya.api.lib import ( + namespaced, + unique_namespace +) +from ayon_maya.api.pipeline import containerise + + +class MayaUsdLoader(load.LoaderPlugin): + """Read USD data in a Maya USD Proxy""" + + product_types = {"model", "usd", "pointcache", "animation"} + representations = {"usd", "usda", "usdc", "usdz", "abc"} + + label = "Load USD to Maya Proxy" + order = -1 + icon = "code-fork" + color = "orange" + + def load(self, context, name=None, namespace=None, options=None): + folder_name = context["folder"]["name"] + namespace = namespace or unique_namespace( + folder_name + "_", + prefix="_" if folder_name[0].isdigit() else "", + suffix="_", + ) + + # Make sure we can load the plugin + cmds.loadPlugin("mayaUsdPlugin", quiet=True) + + path = get_representation_path_from_context(context) + + # Create the shape + cmds.namespace(addNamespace=namespace) + with namespaced(namespace, new=False): + transform = cmds.createNode("transform", + name=name, + skipSelect=True) + proxy = cmds.createNode('mayaUsdProxyShape', + name="{}Shape".format(name), + parent=transform, + skipSelect=True) + + cmds.connectAttr("time1.outTime", "{}.time".format(proxy)) + cmds.setAttr("{}.filePath".format(proxy), path, type="string") + + # By default, we force the proxy to not use a shared stage because + # when doing so Maya will quite easily allow saving into the + # loaded usd file. Since we are loading published files we want to + # avoid altering them. Unshared stages also save their edits into + # the workfile as an artist might expect.
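+ # With the stage unshared, proxy edits live in the Maya workfile + # and the published USD on disk stays untouched.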
+ cmds.setAttr("{}.shareStage".format(proxy), False) + # cmds.setAttr("{}.shareStage".format(proxy), lock=True) + + nodes = [transform, proxy] + self[:] = nodes + + return containerise( + name=name, + namespace=namespace, + nodes=nodes, + context=context, + loader=self.__class__.__name__) + + def update(self, container, context): + # type: (dict, dict) -> None + """Update container with specified representation.""" + node = container['objectName'] + assert cmds.objExists(node), "Missing container" + + members = cmds.sets(node, query=True) or [] + shapes = cmds.ls(members, type="mayaUsdProxyShape") + + repre_entity = context["representation"] + path = get_representation_path(repre_entity) + for shape in shapes: + cmds.setAttr("{}.filePath".format(shape), path, type="string") + + cmds.setAttr("{}.representation".format(node), + repre_entity["id"], + type="string") + + def switch(self, container, context): + self.update(container, context) + + def remove(self, container): + # type: (dict) -> None + """Remove loaded container.""" + # Delete container and its contents + if cmds.objExists(container['objectName']): + members = cmds.sets(container['objectName'], query=True) or [] + cmds.delete([container['objectName']] + members) + + # Remove the namespace, if empty + namespace = container['namespace'] + if cmds.namespace(exists=namespace): + members = cmds.namespaceInfo(namespace, listNamespace=True) + if not members: + cmds.namespace(removeNamespace=namespace) + else: + self.log.warning("Namespace not deleted because it " + "still has members: %s", namespace) diff --git a/client/ayon_maya/plugins/load/load_multiverse_usd.py b/client/ayon_maya/plugins/load/load_multiverse_usd.py new file mode 100644 index 00000000..93bb9509 --- /dev/null +++ b/client/ayon_maya/plugins/load/load_multiverse_usd.py @@ -0,0 +1,129 @@ +# -*- coding: utf-8 -*- +import maya.cmds as cmds +from maya import mel +import os + +from ayon_api import get_representation_by_id + +from ayon_core.pipeline import ( + load, + get_representation_path +) +from ayon_maya.api.lib import ( + maintained_selection, + namespaced, + unique_namespace +) +from ayon_maya.api.pipeline import containerise + + +class MultiverseUsdLoader(load.LoaderPlugin): + """Read USD data in a Multiverse Compound""" + + product_types = { + "model", + "usd", + "mvUsdComposition", + "mvUsdOverride", + "pointcache", + "animation", + } + representations = {"usd", "usda", "usdc", "usdz", "abc"} + + label = "Load USD to Multiverse" + order = -10 + icon = "code-fork" + color = "orange" + + def load(self, context, name=None, namespace=None, options=None): + folder_name = context["folder"]["name"] + namespace = namespace or unique_namespace( + folder_name + "_", + prefix="_" if folder_name[0].isdigit() else "", + suffix="_", + ) + + path = self.filepath_from_context(context) + + # Make sure we can load the plugin + cmds.loadPlugin("MultiverseForMaya", quiet=True) + import multiverse + + # Create the shape + with maintained_selection(): + cmds.namespace(addNamespace=namespace) + with namespaced(namespace, new=False): + shape = multiverse.CreateUsdCompound(path) + transform = cmds.listRelatives( + shape, parent=True, fullPath=True)[0] + + nodes = [transform, shape] + self[:] = nodes + + return containerise( + name=name, + namespace=namespace, + nodes=nodes, + context=context, + loader=self.__class__.__name__) + + def update(self, container, context): + # type: (dict, dict) -> None + """Update container with specified representation.""" + node = container['objectName'] + 
assert cmds.objExists(node), "Missing container" + + members = cmds.sets(node, query=True) or [] + shapes = cmds.ls(members, type="mvUsdCompoundShape") + assert shapes, "Cannot find mvUsdCompoundShape in container" + + project_name = context["project"]["name"] + repre_entity = context["representation"] + path = get_representation_path(repre_entity) + prev_representation_id = cmds.getAttr("{}.representation".format(node)) + prev_representation = get_representation_by_id(project_name, + prev_representation_id) + prev_path = os.path.normpath(prev_representation["attrib"]["path"]) + + # Make sure we can load the plugin + cmds.loadPlugin("MultiverseForMaya", quiet=True) + import multiverse + + for shape in shapes: + + asset_paths = multiverse.GetUsdCompoundAssetPaths(shape) + asset_paths = [os.path.normpath(p) for p in asset_paths] + + assert asset_paths.count(prev_path) == 1, \ + "Couldn't find matching path (or too many)" + prev_path_idx = asset_paths.index(prev_path) + + asset_paths[prev_path_idx] = path + + multiverse.SetUsdCompoundAssetPaths(shape, asset_paths) + + cmds.setAttr("{}.representation".format(node), + repre_entity["id"], + type="string") + mel.eval('refreshEditorTemplates;') + + def switch(self, container, context): + self.update(container, context) + + def remove(self, container): + # type: (dict) -> None + """Remove loaded container.""" + # Delete container and its contents + if cmds.objExists(container['objectName']): + members = cmds.sets(container['objectName'], query=True) or [] + cmds.delete([container['objectName']] + members) + + # Remove the namespace, if empty + namespace = container['namespace'] + if cmds.namespace(exists=namespace): + members = cmds.namespaceInfo(namespace, listNamespace=True) + if not members: + cmds.namespace(removeNamespace=namespace) + else: + self.log.warning("Namespace not deleted because it " + "still has members: %s", namespace) diff --git a/client/ayon_maya/plugins/load/load_multiverse_usd_over.py b/client/ayon_maya/plugins/load/load_multiverse_usd_over.py new file mode 100644 index 00000000..e5135388 --- /dev/null +++ b/client/ayon_maya/plugins/load/load_multiverse_usd_over.py @@ -0,0 +1,134 @@ +# -*- coding: utf-8 -*- +import maya.cmds as cmds +from maya import mel +import os + +import qargparse +from ayon_api import get_representation_by_id + +from ayon_core.pipeline import ( + load, + get_representation_path +) +from ayon_maya.api.lib import ( + maintained_selection +) +from ayon_maya.api.pipeline import containerise + + +class MultiverseUsdOverLoader(load.LoaderPlugin): + """Reference file""" + + product_types = {"mvUsdOverride"} + representations = {"usda", "usd", "usdz"} + + label = "Load Usd Override into Compound" + order = -10 + icon = "code-fork" + color = "orange" + + options = [ + qargparse.String( + "Which Compound", + label="Compound", + help="Select which compound to add this as a layer to." + ) + ] + + def load(self, context, name=None, namespace=None, options=None): + current_usd = cmds.ls(selection=True, + type="mvUsdCompoundShape", + dag=True, + long=True) + if len(current_usd) != 1: + self.log.error("Current selection invalid: '{}', " + "must contain exactly 1 mvUsdCompoundShape."
+ "".format(current_usd)) + return + + # Make sure we can load the plugin + cmds.loadPlugin("MultiverseForMaya", quiet=True) + import multiverse + + path = self.filepath_from_context(context) + nodes = current_usd + with maintained_selection(): + multiverse.AddUsdCompoundAssetPath(current_usd[0], path) + + namespace = current_usd[0].split("|")[1].split(":")[0] + + container = containerise( + name=name, + namespace=namespace, + nodes=nodes, + context=context, + loader=self.__class__.__name__) + + cmds.addAttr(container, longName="mvUsdCompoundShape", + niceName="mvUsdCompoundShape", dataType="string") + cmds.setAttr(container + ".mvUsdCompoundShape", + current_usd[0], type="string") + + return container + + def update(self, container, context): + # type: (dict, dict) -> None + """Update container with specified representation.""" + + cmds.loadPlugin("MultiverseForMaya", quiet=True) + import multiverse + + node = container['objectName'] + assert cmds.objExists(node), "Missing container" + + members = cmds.sets(node, query=True) or [] + shapes = cmds.ls(members, type="mvUsdCompoundShape") + assert shapes, "Cannot find mvUsdCompoundShape in container" + + mvShape = container['mvUsdCompoundShape'] + assert mvShape, "Missing mv source" + + project_name = context["project"]["name"] + repre_entity = context["representation"] + prev_representation_id = cmds.getAttr("{}.representation".format(node)) + prev_representation = get_representation_by_id(project_name, + prev_representation_id) + prev_path = os.path.normpath(prev_representation["attrib"]["path"]) + + path = get_representation_path(repre_entity) + + for shape in shapes: + asset_paths = multiverse.GetUsdCompoundAssetPaths(shape) + asset_paths = [os.path.normpath(p) for p in asset_paths] + + assert asset_paths.count(prev_path) == 1, \ + "Couldn't find matching path (or too many)" + prev_path_idx = asset_paths.index(prev_path) + asset_paths[prev_path_idx] = path + multiverse.SetUsdCompoundAssetPaths(shape, asset_paths) + + cmds.setAttr("{}.representation".format(node), + repre_entity["id"], + type="string") + mel.eval('refreshEditorTemplates;') + + def switch(self, container, context): + self.update(container, context) + + def remove(self, container): + # type: (dict) -> None + """Remove loaded container.""" + # Delete container and its contents + if cmds.objExists(container['objectName']): + members = cmds.sets(container['objectName'], query=True) or [] + cmds.delete([container['objectName']] + members) + + # Remove the namespace, if empty + namespace = container['namespace'] + if cmds.namespace(exists=namespace): + members = cmds.namespaceInfo(namespace, listNamespace=True) + if not members: + cmds.namespace(removeNamespace=namespace) + else: + self.log.warning("Namespace not deleted because it " + "still has members: %s", namespace) diff --git a/client/ayon_maya/plugins/load/load_redshift_proxy.py b/client/ayon_maya/plugins/load/load_redshift_proxy.py new file mode 100644 index 00000000..fba6314f --- /dev/null +++ b/client/ayon_maya/plugins/load/load_redshift_proxy.py @@ -0,0 +1,157 @@ +# -*- coding: utf-8 -*- +"""Loader for Redshift proxy.""" +import os +import clique + +import maya.cmds as cmds + +from ayon_core.settings import get_project_settings +from ayon_core.pipeline import ( + load, + get_representation_path +) +from ayon_maya.api.lib import ( + namespaced, + maintained_selection, + unique_namespace +) +from ayon_maya.api.pipeline import containerise +from ayon_maya.api.plugin import get_load_color_for_product_type + + +class 
RedshiftProxyLoader(load.LoaderPlugin): + """Load Redshift proxy""" + + product_types = {"redshiftproxy"} + representations = {"rs"} + + label = "Import Redshift Proxy" + order = -10 + icon = "code-fork" + color = "orange" + + def load(self, context, name=None, namespace=None, options=None): + """Plugin entry point.""" + product_type = context["product"]["productType"] + + folder_name = context["folder"]["name"] + namespace = namespace or unique_namespace( + folder_name + "_", + prefix="_" if folder_name[0].isdigit() else "", + suffix="_", + ) + + # Ensure Redshift for Maya is loaded. + cmds.loadPlugin("redshift4maya", quiet=True) + + path = self.filepath_from_context(context) + with maintained_selection(): + cmds.namespace(addNamespace=namespace) + with namespaced(namespace, new=False): + nodes, group_node = self.create_rs_proxy(name, path) + + self[:] = nodes + if not nodes: + return + + # colour the group node + project_name = context["project"]["name"] + settings = get_project_settings(project_name) + color = get_load_color_for_product_type(product_type, settings) + if color is not None: + red, green, blue = color + cmds.setAttr("{0}.useOutlinerColor".format(group_node), 1) + cmds.setAttr( + "{0}.outlinerColor".format(group_node), red, green, blue + ) + + return containerise( + name=name, + namespace=namespace, + nodes=nodes, + context=context, + loader=self.__class__.__name__) + + def update(self, container, context): + + node = container['objectName'] + assert cmds.objExists(node), "Missing container" + + members = cmds.sets(node, query=True) or [] + rs_meshes = cmds.ls(members, type="RedshiftProxyMesh") + assert rs_meshes, "Cannot find RedshiftProxyMesh in container" + repre_entity = context["representation"] + filename = get_representation_path(repre_entity) + + for rs_mesh in rs_meshes: + cmds.setAttr("{}.fileName".format(rs_mesh), + filename, + type="string") + + # Update metadata + cmds.setAttr("{}.representation".format(node), + repre_entity["id"], + type="string") + + def remove(self, container): + + # Delete container and its contents + if cmds.objExists(container['objectName']): + members = cmds.sets(container['objectName'], query=True) or [] + cmds.delete([container['objectName']] + members) + + # Remove the namespace, if empty + namespace = container['namespace'] + if cmds.namespace(exists=namespace): + members = cmds.namespaceInfo(namespace, listNamespace=True) + if not members: + cmds.namespace(removeNamespace=namespace) + else: + self.log.warning("Namespace not deleted because it " + "still has members: %s", namespace) + + def switch(self, container, context): + self.update(container, context) + + def create_rs_proxy(self, name, path): + """Creates Redshift Proxies showing a proxy object. + + Args: + name (str): Proxy name. + path (str): Path to proxy file. + + Returns: + (str, str): Name of mesh with Redshift proxy and its parent + transform. 
+ + """ + rs_mesh = cmds.createNode( + 'RedshiftProxyMesh', name="{}_RS".format(name)) + mesh_shape = cmds.createNode("mesh", name="{}_GEOShape".format(name)) + + cmds.setAttr("{}.fileName".format(rs_mesh), + path, + type="string") + + cmds.connectAttr("{}.outMesh".format(rs_mesh), + "{}.inMesh".format(mesh_shape)) + + # TODO: use the assigned shading group as shaders if existed + # assign default shader to redshift proxy + if cmds.ls("initialShadingGroup", type="shadingEngine"): + cmds.sets(mesh_shape, forceElement="initialShadingGroup") + + group_node = cmds.group(empty=True, name="{}_GRP".format(name)) + mesh_transform = cmds.listRelatives(mesh_shape, + parent=True, fullPath=True) + cmds.parent(mesh_transform, group_node) + nodes = [rs_mesh, mesh_shape, group_node] + + # determine if we need to enable animation support + files_in_folder = os.listdir(os.path.dirname(path)) + collections, remainder = clique.assemble(files_in_folder) + + if collections: + cmds.setAttr("{}.useFrameExtension".format(rs_mesh), 1) + + return nodes, group_node diff --git a/client/ayon_maya/plugins/load/load_reference.py b/client/ayon_maya/plugins/load/load_reference.py new file mode 100644 index 00000000..4aad3ddb --- /dev/null +++ b/client/ayon_maya/plugins/load/load_reference.py @@ -0,0 +1,354 @@ +import difflib +import contextlib + +from maya import cmds +import qargparse + +from ayon_core.settings import get_project_settings +from ayon_maya.api import plugin +from ayon_maya.api.lib import ( + maintained_selection, + get_container_members, + parent_nodes, + create_rig_animation_instance +) + + +@contextlib.contextmanager +def preserve_modelpanel_cameras(container, log=None): + """Preserve camera members of container in the modelPanels. + + This is used to ensure a camera remains in the modelPanels after updating + to a new version. + + """ + + # Get the modelPanels that used the old camera + members = get_container_members(container) + old_cameras = set(cmds.ls(members, type="camera", long=True)) + if not old_cameras: + # No need to manage anything + yield + return + + panel_cameras = {} + for panel in cmds.getPanel(type="modelPanel"): + cam = cmds.ls(cmds.modelPanel(panel, query=True, camera=True), + long=True)[0] + + # Often but not always maya returns the transform from the + # modelPanel as opposed to the camera shape, so we convert it + # to explicitly be the camera shape + if cmds.nodeType(cam) != "camera": + cam = cmds.listRelatives(cam, + children=True, + fullPath=True, + type="camera")[0] + if cam in old_cameras: + panel_cameras[panel] = cam + + if not panel_cameras: + # No need to manage anything + yield + return + + try: + yield + finally: + new_members = get_container_members(container) + new_cameras = set(cmds.ls(new_members, type="camera", long=True)) + if not new_cameras: + return + + for panel, cam_name in panel_cameras.items(): + new_camera = None + if cam_name in new_cameras: + new_camera = cam_name + elif len(new_cameras) == 1: + new_camera = next(iter(new_cameras)) + else: + # Multiple cameras in the updated container but not an exact + # match detected by name. 
Find the closest match + matches = difflib.get_close_matches(word=cam_name, + possibilities=new_cameras, + n=1) + if matches: + new_camera = matches[0] # best match + if log: + log.info("Camera in '{}' restored with " + "closest match camera: {} (before: {})" + .format(panel, new_camera, cam_name)) + + if not new_camera: + # Unable to find the camera to re-apply in the modelpanel + continue + + cmds.modelPanel(panel, edit=True, camera=new_camera) + + +class ReferenceLoader(plugin.ReferenceLoader): + """Reference file""" + + product_types = { + "model", + "pointcache", + "proxyAbc", + "animation", + "mayaAscii", + "mayaScene", + "setdress", + "layout", + "camera", + "rig", + "camerarig", + "staticMesh", + "skeletalMesh", + "mvLook", + "matchmove", + } + + representations = {"ma", "abc", "fbx", "mb"} + + label = "Reference" + order = -10 + icon = "code-fork" + color = "orange" + + def process_reference(self, context, name, namespace, options): + import maya.cmds as cmds + + product_type = context["product"]["productType"] + project_name = context["project"]["name"] + # True by default to keep legacy behaviours + attach_to_root = options.get("attach_to_root", True) + group_name = options["group_name"] + + # no group shall be created + if not attach_to_root: + group_name = namespace + + kwargs = {} + if "file_options" in options: + kwargs["options"] = options["file_options"] + if "file_type" in options: + kwargs["type"] = options["file_type"] + + path = self.filepath_from_context(context) + with maintained_selection(): + cmds.loadPlugin("AbcImport.mll", quiet=True) + + file_url = self.prepare_root_value(path, project_name) + nodes = cmds.file(file_url, + namespace=namespace, + sharedReferenceFile=False, + reference=True, + returnNewNodes=True, + groupReference=attach_to_root, + groupName=group_name, + **kwargs) + + shapes = cmds.ls(nodes, shapes=True, long=True) + + new_nodes = (list(set(nodes) - set(shapes))) + + # if there are cameras, try to lock their transforms + self._lock_camera_transforms(new_nodes) + + current_namespace = cmds.namespaceInfo(currentNamespace=True) + + if current_namespace != ":": + group_name = current_namespace + ":" + group_name + + self[:] = new_nodes + + if attach_to_root: + group_name = "|" + group_name + roots = cmds.listRelatives(group_name, + children=True, + fullPath=True) or [] + + if product_type not in { + "layout", "setdress", "mayaAscii", "mayaScene" + }: + # QUESTION Why do we need to exclude these families? 
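+                # Zero out the group's transform pivots without moving the
+                # loaded content: `parent_nodes` temporarily un-parents the
+                # roots and restores the hierarchy when the block exits.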
+ with parent_nodes(roots, parent=None): + cmds.xform(group_name, zeroTransformPivots=True) + + settings = get_project_settings(project_name) + + display_handle = settings['maya']['load'].get( + 'reference_loader', {} + ).get('display_handle', True) + cmds.setAttr( + "{}.displayHandle".format(group_name), display_handle + ) + + color = plugin.get_load_color_for_product_type( + product_type, settings + ) + if color is not None: + red, green, blue = color + cmds.setAttr("{}.useOutlinerColor".format(group_name), 1) + cmds.setAttr( + "{}.outlinerColor".format(group_name), + red, + green, + blue + ) + + cmds.setAttr( + "{}.displayHandle".format(group_name), display_handle + ) + # get bounding box + bbox = cmds.exactWorldBoundingBox(group_name) + # get pivot position on world space + pivot = cmds.xform(group_name, q=True, sp=True, ws=True) + # center of bounding box + cx = (bbox[0] + bbox[3]) / 2 + cy = (bbox[1] + bbox[4]) / 2 + cz = (bbox[2] + bbox[5]) / 2 + # add pivot position to calculate offset + cx = cx + pivot[0] + cy = cy + pivot[1] + cz = cz + pivot[2] + # set selection handle offset to center of bounding box + cmds.setAttr("{}.selectHandleX".format(group_name), cx) + cmds.setAttr("{}.selectHandleY".format(group_name), cy) + cmds.setAttr("{}.selectHandleZ".format(group_name), cz) + + if product_type == "rig": + self._post_process_rig(namespace, context, options) + else: + if "translate" in options: + if not attach_to_root and new_nodes: + root_nodes = cmds.ls(new_nodes, assemblies=True, + long=True) + # we assume only a single root is ever loaded + group_name = root_nodes[0] + cmds.setAttr("{}.translate".format(group_name), + *options["translate"]) + return new_nodes + + def switch(self, container, context): + self.update(container, context) + + def update(self, container, context): + with preserve_modelpanel_cameras(container, log=self.log): + super(ReferenceLoader, self).update(container, context) + + # We also want to lock camera transforms on any new cameras in the + # reference or for a camera which might have changed names. 
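+        # (Members are re-queried after the update so cameras that were
+        # added or renamed in the new version are included before locking.)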
+ members = get_container_members(container) + self._lock_camera_transforms(members) + + def _post_process_rig(self, namespace, context, options): + + nodes = self[:] + create_rig_animation_instance( + nodes, context, namespace, options=options, log=self.log + ) + + def _lock_camera_transforms(self, nodes): + cameras = cmds.ls(nodes, type="camera") + if not cameras: + return + + # Check the Maya version, lockTransform has been introduced since + # Maya 2016.5 Ext 2 + version = int(cmds.about(version=True)) + if version >= 2016: + for camera in cameras: + cmds.camera(camera, edit=True, lockTransform=True) + else: + self.log.warning("This version of Maya does not support locking of" + " transforms of cameras.") + + +class MayaUSDReferenceLoader(ReferenceLoader): + """Reference USD file to native Maya nodes using MayaUSDImport reference""" + + label = "Reference Maya USD" + product_types = {"usd"} + representations = {"usd"} + extensions = {"usd", "usda", "usdc"} + + options = ReferenceLoader.options + [ + qargparse.Boolean( + "readAnimData", + label="Load anim data", + default=True, + help="Load animation data from USD file" + ), + qargparse.Boolean( + "useAsAnimationCache", + label="Use as animation cache", + default=True, + help=( + "Imports geometry prims with time-sampled point data using a " + "point-based deformer that references the imported " + "USD file.\n" + "This provides better import and playback performance when " + "importing time-sampled geometry from USD, and should " + "reduce the weight of the resulting Maya scene." + ) + ), + qargparse.Boolean( + "importInstances", + label="Import instances", + default=True, + help=( + "Import USD instanced geometries as Maya instanced shapes. " + "Will flatten the scene otherwise." + ) + ), + qargparse.String( + "primPath", + label="Prim Path", + default="/", + help=( + "Name of the USD scope where traversing will begin.\n" + "The prim at the specified primPath (including the prim) will " + "be imported.\n" + "Specifying the pseudo-root (/) means you want " + "to import everything in the file.\n" + "If the passed prim path is empty, it will first try to " + "import the defaultPrim for the rootLayer if it exists.\n" + "Otherwise, it will behave as if the pseudo-root was passed " + "in." 
+            )
+        )
+    ]
+
+    file_type = "USD Import"
+
+    def process_reference(self, context, name, namespace, options):
+        cmds.loadPlugin("mayaUsdPlugin", quiet=True)
+
+        def bool_option(key, default):
+            # Shorthand for getting optional boolean file option from options
+            value = int(bool(options.get(key, default)))
+            return "{}={}".format(key, value)
+
+        def string_option(key, default):
+            # Shorthand for getting optional string file option from options
+            value = str(options.get(key, default))
+            return "{}={}".format(key, value)
+
+        options["file_options"] = ";".join([
+            string_option("primPath", default="/"),
+            bool_option("importInstances", default=True),
+            bool_option("useAsAnimationCache", default=True),
+            bool_option("readAnimData", default=True),
+            # TODO: Expose more parameters
+            # "preferredMaterial=none",
+            # "importRelativeTextures=Automatic",
+            # "useCustomFrameRange=0",
+            # "startTime=0",
+            # "endTime=0",
+            # "importUSDZTextures=0"
+        ])
+        options["file_type"] = self.file_type
+
+        return super(MayaUSDReferenceLoader, self).process_reference(
+            context, name, namespace, options
+        )
diff --git a/client/ayon_maya/plugins/load/load_rendersetup.py b/client/ayon_maya/plugins/load/load_rendersetup.py
new file mode 100644
index 00000000..96f38ba1
--- /dev/null
+++ b/client/ayon_maya/plugins/load/load_rendersetup.py
@@ -0,0 +1,171 @@
+# -*- coding: utf-8 -*-
+"""Load and update RenderSetup settings.
+
+Working with RenderSetup settings in Maya is done through JSON files.
+When such a JSON file is loaded, it overwrites all settings on the
+RenderSetup instance.
+"""
+
+import json
+import sys
+import six
+import contextlib
+
+from ayon_core.lib import BoolDef, EnumDef
+from ayon_core.pipeline import (
+    load,
+    get_representation_path
+)
+from ayon_maya.api import lib
+from ayon_maya.api.pipeline import containerise
+
+from maya import cmds
+import maya.app.renderSetup.model.renderSetup as renderSetup
+
+
+@contextlib.contextmanager
+def mark_all_imported(enabled):
+    """Mark all imported nodes accepted by removing the `imported` attribute"""
+    if not enabled:
+        yield
+        return
+
+    node_types = cmds.pluginInfo("renderSetup", query=True, dependNode=True)
+
+    # Get the nodes before load so we can remove the `imported`
+    # attribute on all newly created render setup layers after import
+    before = cmds.ls(type=node_types, long=True)
+    try:
+        yield
+    finally:
+        after = cmds.ls(type=node_types, long=True)
+        for node in (node for node in after if node not in before):
+            if cmds.attributeQuery("imported",
+                                   node=node,
+                                   exists=True):
+                plug = "{}.imported".format(node)
+                if cmds.getAttr(plug):
+                    cmds.deleteAttr(plug)
+
+
+class RenderSetupLoader(load.LoaderPlugin):
+    """Load json preset for RenderSetup overwriting current one."""
+
+    product_types = {"rendersetup"}
+    representations = {"json"}
+    defaults = ['Main']
+
+    label = "Load RenderSetup template"
+    icon = "tablet"
+    color = "orange"
+
+    options = [
+        BoolDef("accept_import",
+                label="Accept import on load",
+                tooltip=(
+                    "By default importing or pasting Render Setup collections "
+                    "will display them italic in the Render Setup list.\nWith "
+                    "this enabled the load will directly mark the import "
+                    "'accepted' and remove the italic view."
+                ),
+                default=True),
+        BoolDef("load_managed",
+                label="Load Managed",
+                tooltip=(
+                    "Containerize the rendersetup on load so it can be "
+                    "'updated' later."
+                ),
+                default=True),
+        EnumDef("import_mode",
+                label="Import mode",
+                items={
+                    renderSetup.DECODE_AND_OVERWRITE: (
+                        "Flush existing render setup and "
+                        "add without any namespace"
+                    ),
+                    renderSetup.DECODE_AND_MERGE: (
+                        "Merge with the existing render setup objects and "
+                        "rename the unexpected objects"
+                    ),
+                    renderSetup.DECODE_AND_RENAME: (
+                        "Rename all decoded render setup objects to not "
+                        "conflict with the existing render setup"
+                    ),
+                },
+                default=renderSetup.DECODE_AND_OVERWRITE)
+    ]
+
+    def load(self, context, name, namespace, data):
+        """Load RenderSetup settings."""
+
+        path = self.filepath_from_context(context)
+
+        accept_import = data.get("accept_import", True)
+        import_mode = data.get("import_mode", renderSetup.DECODE_AND_OVERWRITE)
+
+        self.log.info(">>> loading json [ {} ]".format(path))
+        with mark_all_imported(accept_import):
+            with open(path, "r") as file:
+                renderSetup.instance().decode(
+                    json.load(file), import_mode, None)
+
+        if data.get("load_managed", True):
+            self.log.info(">>> containerising [ {} ]".format(name))
+            folder_name = context["folder"]["name"]
+            namespace = namespace or lib.unique_namespace(
+                folder_name + "_",
+                prefix="_" if folder_name[0].isdigit() else "",
+                suffix="_",
+            )
+
+            return containerise(
+                name=name,
+                namespace=namespace,
+                nodes=[],
+                context=context,
+                loader=self.__class__.__name__)
+
+    def remove(self, container):
+        """Remove RenderSetup settings instance."""
+        container_name = container["objectName"]
+
+        self.log.info("Removing '%s' from Maya.." % container["name"])
+
+        container_content = cmds.sets(container_name, query=True) or []
+        nodes = cmds.ls(container_content, long=True)
+
+        nodes.append(container_name)
+
+        try:
+            cmds.delete(nodes)
+        except ValueError:
+            # Already implicitly deleted by Maya upon removing reference
+            pass
+
+    def update(self, container, context):
+        """Update RenderSetup settings by overwriting the existing ones."""
+        lib.show_message(
+            "Render setup update",
+            "Render setup settings will be overwritten by the new version. "
+            "Any user-defined settings not included in the loaded version "
+            "will be lost.")
+        repre_entity = context["representation"]
+        path = get_representation_path(repre_entity)
+        with open(path, "r") as file:
+            try:
+                renderSetup.instance().decode(
+                    json.load(file), renderSetup.DECODE_AND_OVERWRITE, None)
+            except Exception:
+                self.log.error("There were errors during loading")
+                six.reraise(*sys.exc_info())
+
+        # Update metadata
+        node = container["objectName"]
+        cmds.setAttr("{}.representation".format(node),
+                     repre_entity["id"],
+                     type="string")
+        self.log.info("... updated")
+
+    def switch(self, container, context):
+        """Switch representations."""
+        self.update(container, context)
diff --git a/client/ayon_maya/plugins/load/load_vdb_to_arnold.py b/client/ayon_maya/plugins/load/load_vdb_to_arnold.py
new file mode 100644
index 00000000..d326c224
--- /dev/null
+++ b/client/ayon_maya/plugins/load/load_vdb_to_arnold.py
@@ -0,0 +1,135 @@
+import os
+
+from ayon_core.settings import get_project_settings
+from ayon_core.pipeline import (
+    load,
+    get_representation_path
+)
+from ayon_maya.api.plugin import get_load_color_for_product_type
+# TODO aiVolume doesn't automatically set velocity fps correctly, set manual?
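+# A possible manual fix for the TODO above (untested sketch; assumes the
+# aiVolume node exposes a `velocityFps` attribute mirroring Arnold's
+# velocity_fps parameter -- verify against the MtoA version in use):
+#
+#     import maya.mel as mel
+#     fps = mel.eval("currentTimeUnitToFPS")
+#     cmds.setAttr("{}.velocityFps".format(grid_node), fps)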
+
+
+class LoadVDBtoArnold(load.LoaderPlugin):
+    """Load OpenVDB for Arnold in aiVolume"""
+
+    product_types = {"vdbcache"}
+    representations = {"vdb"}
+
+    label = "Load VDB to Arnold"
+    icon = "cloud"
+    color = "orange"
+
+    def load(self, context, name, namespace, data):
+
+        from maya import cmds
+        from ayon_maya.api.pipeline import containerise
+        from ayon_maya.api.lib import unique_namespace
+
+        product_type = context["product"]["productType"]
+
+        # Check if the Arnold plug-in is available on this machine
+        try:
+            cmds.loadPlugin("mtoa", quiet=True)
+        except Exception as exc:
+            self.log.error("Encountered exception:\n%s" % exc)
+            return
+
+        folder_name = context["folder"]["name"]
+        namespace = namespace or unique_namespace(
+            folder_name + "_",
+            prefix="_" if folder_name[0].isdigit() else "",
+            suffix="_",
+        )
+
+        # Root group
+        label = "{}:{}".format(namespace, name)
+        root = cmds.group(name=label, empty=True)
+
+        project_name = context["project"]["name"]
+        settings = get_project_settings(project_name)
+        color = get_load_color_for_product_type(product_type, settings)
+        if color is not None:
+            red, green, blue = color
+            cmds.setAttr(root + ".useOutlinerColor", 1)
+            cmds.setAttr(root + ".outlinerColor", red, green, blue)
+
+        # Create the aiVolume shape
+        grid_node = cmds.createNode("aiVolume",
+                                    name="{}Shape".format(root),
+                                    parent=root)
+
+        path = self.filepath_from_context(context)
+        self._set_path(grid_node,
+                       path=path,
+                       repre_entity=context["representation"])
+
+        # Lock the shape node so the user can't delete the transform/shape
+        # as if it was referenced
+        cmds.lockNode(grid_node, lock=True)
+
+        nodes = [root, grid_node]
+        self[:] = nodes
+
+        return containerise(
+            name=name,
+            namespace=namespace,
+            nodes=nodes,
+            context=context,
+            loader=self.__class__.__name__)
+
+    def update(self, container, context):
+
+        from maya import cmds
+
+        repre_entity = context["representation"]
+
+        path = get_representation_path(repre_entity)
+
+        # Find the aiVolume node
+        members = cmds.sets(container['objectName'], query=True)
+        grid_nodes = cmds.ls(members, type="aiVolume", long=True)
+        assert len(grid_nodes) == 1, "This is a bug"
+
+        # Update the aiVolume
+        self._set_path(grid_nodes[0], path=path, repre_entity=repre_entity)
+
+        # Update container representation
+        cmds.setAttr(container["objectName"] + ".representation",
+                     repre_entity["id"],
+                     type="string")
+
+    def switch(self, container, context):
+        self.update(container, context)
+
+    def remove(self, container):
+
+        from maya import cmds
+
+        # Get all members of the AYON container, ensure they are unlocked
+        # and delete everything
+        members = cmds.sets(container['objectName'], query=True)
+        cmds.lockNode(members, lock=False)
+        cmds.delete([container['objectName']] + members)
+
+        # Clean up the namespace
+        try:
+            cmds.namespace(removeNamespace=container['namespace'],
+                           deleteNamespaceContent=True)
+        except RuntimeError:
+            pass
+
+    @staticmethod
+    def _set_path(grid_node,
+                  path,
+                  repre_entity):
+        """Apply the settings for the VDB path to the aiVolume node"""
+        from maya import cmds
+
+        if not os.path.exists(path):
+            raise RuntimeError("Path does not exist: %s" % path)
+
+        is_sequence = "frame" in repre_entity["context"]
+        cmds.setAttr(grid_node + ".useFrameExtension", is_sequence)
+
+        # Set file path
+        cmds.setAttr(grid_node + ".filename", path, type="string")
diff --git a/client/ayon_maya/plugins/load/load_vdb_to_redshift.py b/client/ayon_maya/plugins/load/load_vdb_to_redshift.py
new file mode 100644
index 00000000..5ac8dfa0
--- /dev/null
+++ b/client/ayon_maya/plugins/load/load_vdb_to_redshift.py
@@ -0,0 +1,145 @@
+import os
+
+from ayon_core.settings import get_project_settings
+from ayon_core.pipeline import (
+    load,
+    get_representation_path
+)
+from ayon_maya.api.plugin import get_load_color_for_product_type
+
+
+class LoadVDBtoRedShift(load.LoaderPlugin):
+    """Load OpenVDB in a Redshift Volume Shape
+
+    Note that the RedshiftVolumeShape is created without a RedshiftVolume
+    shader assigned. To get the Redshift volume to render correctly assign
+    a RedshiftVolume shader (in the Hypershade) and set the density, scatter
+    and emission channels to the channel names of the volumes in the VDB file.
+
+    """
+
+    product_types = {"vdbcache"}
+    representations = {"vdb"}
+
+    label = "Load VDB to RedShift"
+    icon = "cloud"
+    color = "orange"
+
+    def load(self, context, name=None, namespace=None, data=None):
+
+        from maya import cmds
+        from ayon_maya.api.pipeline import containerise
+        from ayon_maya.api.lib import unique_namespace
+
+        product_type = context["product"]["productType"]
+
+        # Check if the Redshift plug-in is available on this machine
+        try:
+            cmds.loadPlugin("redshift4maya", quiet=True)
+        except Exception as exc:
+            self.log.error("Encountered exception:\n%s" % exc)
+            return
+
+        # Check if viewport drawing engine is Open GL Core (compat)
+        render_engine = None
+        compatible = "OpenGL"
+        if cmds.optionVar(exists="vp2RenderingEngine"):
+            render_engine = cmds.optionVar(query="vp2RenderingEngine")
+
+        if not render_engine or not render_engine.startswith(compatible):
+            raise RuntimeError("Current scene's settings are incompatible. "
+                               "See Preferences > Display > Viewport 2.0 to "
+                               "set the render engine to '%s'"
+                               % compatible)
+
+        folder_name = context["folder"]["name"]
+        namespace = namespace or unique_namespace(
+            folder_name + "_",
+            prefix="_" if folder_name[0].isdigit() else "",
+            suffix="_",
+        )
+
+        # Root group
+        label = "{}:{}".format(namespace, name)
+        root = cmds.createNode("transform", name=label)
+
+        project_name = context["project"]["name"]
+        settings = get_project_settings(project_name)
+        color = get_load_color_for_product_type(product_type, settings)
+        if color is not None:
+            red, green, blue = color
+            cmds.setAttr(root + ".useOutlinerColor", 1)
+            cmds.setAttr(root + ".outlinerColor", red, green, blue)
+
+        # Create the RedshiftVolumeShape
+        volume_node = cmds.createNode("RedshiftVolumeShape",
+                                      name="{}RVSShape".format(label),
+                                      parent=root)
+
+        self._set_path(volume_node,
+                       path=self.filepath_from_context(context),
+                       representation=context["representation"])
+
+        nodes = [root, volume_node]
+        self[:] = nodes
+
+        return containerise(
+            name=name,
+            namespace=namespace,
+            nodes=nodes,
+            context=context,
+            loader=self.__class__.__name__)
+
+    def update(self, container, context):
+        from maya import cmds
+
+        repre_entity = context["representation"]
+        path = get_representation_path(repre_entity)
+
+        # Find the RedshiftVolumeShape
+        members = cmds.sets(container['objectName'], query=True)
+        grid_nodes = cmds.ls(members, type="RedshiftVolumeShape", long=True)
+        assert len(grid_nodes) == 1, "This is a bug"
+
+        # Update the RedshiftVolumeShape
+        self._set_path(grid_nodes[0], path=path, representation=repre_entity)
+
+        # Update container representation
+        cmds.setAttr(container["objectName"] + ".representation",
+                     repre_entity["id"],
+                     type="string")
+
+    def remove(self, container):
+        from maya import cmds
+
+        # Get all members of the AYON container, ensure they are unlocked
+        # and delete everything
+        members = cmds.sets(container['objectName'],
query=True) + cmds.lockNode(members, lock=False) + cmds.delete([container['objectName']] + members) + + # Clean up the namespace + try: + cmds.namespace(removeNamespace=container['namespace'], + deleteNamespaceContent=True) + except RuntimeError: + pass + + def switch(self, container, context): + self.update(container, context) + + @staticmethod + def _set_path(grid_node, + path, + representation): + """Apply the settings for the VDB path to the RedshiftVolumeShape""" + from maya import cmds + + if not os.path.exists(path): + raise RuntimeError("Path does not exist: %s" % path) + + is_sequence = "frame" in representation["context"] + cmds.setAttr(grid_node + ".useFrameExtension", is_sequence) + + # Set file path + cmds.setAttr(grid_node + ".fileName", path, type="string") diff --git a/client/ayon_maya/plugins/load/load_vdb_to_vray.py b/client/ayon_maya/plugins/load/load_vdb_to_vray.py new file mode 100644 index 00000000..7a3129df --- /dev/null +++ b/client/ayon_maya/plugins/load/load_vdb_to_vray.py @@ -0,0 +1,288 @@ +import os + +from ayon_core.settings import get_project_settings +from ayon_core.pipeline import ( + load, + get_representation_path +) +from ayon_maya.api.plugin import get_load_color_for_product_type + +from maya import cmds + +# List of 3rd Party Channels Mapping names for VRayVolumeGrid +# See: https://docs.chaosgroup.com/display/VRAY4MAYA/Input +# #Input-3rdPartyChannelsMapping +THIRD_PARTY_CHANNELS = { + 2: "Smoke", + 1: "Temperature", + 10: "Fuel", + 4: "Velocity.x", + 5: "Velocity.y", + 6: "Velocity.z", + 7: "Red", + 8: "Green", + 9: "Blue", + 14: "Wavelet Energy", + 19: "Wavelet.u", + 20: "Wavelet.v", + 21: "Wavelet.w", + # These are not in UI or documentation but V-Ray does seem to set these. + 15: "AdvectionOrigin.x", + 16: "AdvectionOrigin.y", + 17: "AdvectionOrigin.z", + +} + + +def _fix_duplicate_vvg_callbacks(): + """Workaround to kill duplicate VRayVolumeGrids attribute callbacks. + + This fixes a huge lag in Maya on switching 3rd Party Channels Mappings + or to different .vdb file paths because it spams an attribute changed + callback: `vvgUserChannelMappingsUpdateUI`. 
+ + ChaosGroup bug ticket: 154-008-9890 + + Found with: + - Maya 2019.2 on Windows 10 + - V-Ray: V-Ray Next for Maya, update 1 version 4.12.01.00001 + + Bug still present in: + - Maya 2022.1 on Windows 10 + - V-Ray 5 for Maya, Update 2.1 (v5.20.01 from Dec 16 2021) + + """ + # todo(roy): Remove when new V-Ray release fixes duplicate calls + + jobs = cmds.scriptJob(listJobs=True) + + matched = set() + for entry in jobs: + # Remove the number + index, callback = entry.split(":", 1) + callback = callback.strip() + + # Detect whether it is a `vvgUserChannelMappingsUpdateUI` + # attribute change callback + if callback.startswith('"-runOnce" 1 "-attributeChange" "'): + if '"vvgUserChannelMappingsUpdateUI(' in callback: + if callback in matched: + # If we've seen this callback before then + # delete the duplicate callback + cmds.scriptJob(kill=int(index)) + else: + matched.add(callback) + + +class LoadVDBtoVRay(load.LoaderPlugin): + """Load OpenVDB in a V-Ray Volume Grid""" + + product_types = {"vdbcache"} + representations = {"vdb"} + + label = "Load VDB to VRay" + icon = "cloud" + color = "orange" + + def load(self, context, name, namespace, data): + + from ayon_maya.api.lib import unique_namespace + from ayon_maya.api.pipeline import containerise + + path = self.filepath_from_context(context) + assert os.path.exists(path), ( + "Path does not exist: %s" % path + ) + + product_type = context["product"]["productType"] + + # Ensure V-ray is loaded with the vrayvolumegrid + if not cmds.pluginInfo("vrayformaya", query=True, loaded=True): + cmds.loadPlugin("vrayformaya") + if not cmds.pluginInfo("vrayvolumegrid", query=True, loaded=True): + cmds.loadPlugin("vrayvolumegrid") + + # Check if viewport drawing engine is Open GL Core (compat) + render_engine = None + compatible = "OpenGLCoreProfileCompat" + if cmds.optionVar(exists="vp2RenderingEngine"): + render_engine = cmds.optionVar(query="vp2RenderingEngine") + + if not render_engine or render_engine != compatible: + self.log.warning("Current scene's settings are incompatible." 
+ "See Preferences > Display > Viewport 2.0 to " + "set the render engine to '%s'" % compatible) + + folder_name = context["folder"]["name"] + namespace = namespace or unique_namespace( + folder_name + "_", + prefix="_" if folder_name[0].isdigit() else "", + suffix="_", + ) + + # Root group + label = "{}:{}_VDB".format(namespace, name) + root = cmds.group(name=label, empty=True) + + project_name = context["project"]["name"] + settings = get_project_settings(project_name) + color = get_load_color_for_product_type(product_type, settings) + if color is not None: + red, green, blue = color + cmds.setAttr(root + ".useOutlinerColor", 1) + cmds.setAttr(root + ".outlinerColor", red, green, blue) + + # Create VRayVolumeGrid + grid_node = cmds.createNode("VRayVolumeGrid", + name="{}Shape".format(label), + parent=root) + + # Ensure .currentTime is connected to time1.outTime + cmds.connectAttr("time1.outTime", grid_node + ".currentTime") + + # Set path + self._set_path(grid_node, path, show_preset_popup=True) + + # Lock the shape node so the user can't delete the transform/shape + # as if it was referenced + cmds.lockNode(grid_node, lock=True) + + nodes = [root, grid_node] + self[:] = nodes + + return containerise( + name=name, + namespace=namespace, + nodes=nodes, + context=context, + loader=self.__class__.__name__) + + def _set_path(self, grid_node, path, show_preset_popup=True): + + from ayon_maya.api.lib import attribute_values + from maya import cmds + + def _get_filename_from_folder(path): + # Using the sequence of .vdb files we check the frame range, etc. + # to set the filename with #### padding. + files = sorted(x for x in os.listdir(path) if x.endswith(".vdb")) + if not files: + raise RuntimeError("Couldn't find .vdb files in: %s" % path) + + if len(files) == 1: + # Ensure check for single file is also done in folder + fname = files[0] + else: + # Sequence + import clique + # todo: check support for negative frames as input + collections, remainder = clique.assemble(files) + assert len(collections) == 1, ( + "Must find a single image sequence, " + "found: %s" % (collections,) + ) + collection = collections[0] + + fname = collection.format('{head}{{padding}}{tail}') + padding = collection.padding + if padding == 0: + # Clique doesn't provide padding if the frame number never + # starts with a zero and thus has never any visual padding. + # So we fall back to the smallest frame number as padding. + padding = min(len(str(i)) for i in collection.indexes) + + # Supply frame/padding with # signs + padding_str = "#" * padding + fname = fname.format(padding=padding_str) + + return os.path.join(path, fname) + + # The path is either a single file or sequence in a folder so + # we do a quick lookup for our files + if os.path.isfile(path): + path = os.path.dirname(path) + path = _get_filename_from_folder(path) + + # Even when not applying a preset V-Ray will reset the 3rd Party + # Channels Mapping of the VRayVolumeGrid when setting the .inPath + # value. As such we try and preserve the values ourselves. + # Reported as ChaosGroup bug ticket: 154-011-2909  + # todo(roy): Remove when new V-Ray release preserves values + original_user_mapping = cmds.getAttr(grid_node + ".usrchmap") or "" + + # Workaround for V-Ray bug: fix lag on path change, see function + _fix_duplicate_vvg_callbacks() + + # Suppress preset pop-up if we want. 
+ popup_attr = "{0}.inDontOfferPresets".format(grid_node) + popup = {popup_attr: not show_preset_popup} + with attribute_values(popup): + cmds.setAttr(grid_node + ".inPath", path, type="string") + + # Reapply the 3rd Party channels user mapping when no preset popup + # was shown to the user + if not show_preset_popup: + channels = cmds.getAttr(grid_node + ".usrchmapallch").split(";") + channels = set(channels) # optimize lookup + restored_mapping = "" + for entry in original_user_mapping.split(";"): + if not entry: + # Ignore empty entries + continue + + # If 3rd Party Channels selection channel still exists then + # add it again. + index, channel = entry.split(",") + attr = THIRD_PARTY_CHANNELS.get(int(index), + # Fallback for when a mapping + # was set that is not in the + # documentation + "???") + if channel in channels: + restored_mapping += entry + ";" + else: + self.log.warning("Can't preserve '%s' mapping due to " + "missing channel '%s' on node: " + "%s" % (attr, channel, grid_node)) + + if restored_mapping: + cmds.setAttr(grid_node + ".usrchmap", + restored_mapping, + type="string") + + def update(self, container, context): + repre_entity = context["representation"] + + path = get_representation_path(repre_entity) + + # Find VRayVolumeGrid + members = cmds.sets(container['objectName'], query=True) + grid_nodes = cmds.ls(members, type="VRayVolumeGrid", long=True) + assert len(grid_nodes) > 0, "This is a bug" + + # Update the VRayVolumeGrid + for grid_node in grid_nodes: + self._set_path(grid_node, path=path, show_preset_popup=False) + + # Update container representation + cmds.setAttr(container["objectName"] + ".representation", + repre_entity["id"], + type="string") + + def switch(self, container, context): + self.update(container, context) + + def remove(self, container): + + # Get all members of the AYON container, ensure they are unlocked + # and delete everything + members = cmds.sets(container['objectName'], query=True) + cmds.lockNode(members, lock=False) + cmds.delete([container['objectName']] + members) + + # Clean up the namespace + try: + cmds.namespace(removeNamespace=container['namespace'], + deleteNamespaceContent=True) + except RuntimeError: + pass diff --git a/client/ayon_maya/plugins/load/load_vrayproxy.py b/client/ayon_maya/plugins/load/load_vrayproxy.py new file mode 100644 index 00000000..014c8fea --- /dev/null +++ b/client/ayon_maya/plugins/load/load_vrayproxy.py @@ -0,0 +1,199 @@ +# -*- coding: utf-8 -*- +"""Loader for Vray Proxy files. + +If there are Alembics published along vray proxy (in the same version), +loader will use them instead of native vray vrmesh format. + +""" +import os + +import maya.cmds as cmds + +import ayon_api +from ayon_core.settings import get_project_settings +from ayon_core.pipeline import ( + load, + get_representation_path, +) +from ayon_maya.api.lib import ( + maintained_selection, + namespaced, + unique_namespace +) +from ayon_maya.api.pipeline import containerise +from ayon_maya.api.plugin import get_load_color_for_product_type + + +class VRayProxyLoader(load.LoaderPlugin): + """Load VRay Proxy with Alembic or VrayMesh.""" + + product_types = {"vrayproxy", "model", "pointcache", "animation"} + representations = {"vrmesh", "abc"} + + label = "Import VRay Proxy" + order = -10 + icon = "code-fork" + color = "orange" + + def load(self, context, name=None, namespace=None, options=None): + # type: (dict, str, str, dict) -> None + """Loader entry point. + + Args: + context (dict): Loaded representation context. 
+ name (str): Name of container. + namespace (str): Optional namespace name. + options (dict): Optional loader options. + + """ + + product_type = context["product"]["productType"] + + # get all representations for this version + filename = self._get_abc( + context["project"]["name"], context["version"]["id"] + ) + if not filename: + filename = self.filepath_from_context(context) + + folder_name = context["folder"]["name"] + namespace = namespace or unique_namespace( + folder_name + "_", + prefix="_" if folder_name[0].isdigit() else "", + suffix="_", + ) + + # Ensure V-Ray for Maya is loaded. + cmds.loadPlugin("vrayformaya", quiet=True) + + with maintained_selection(): + cmds.namespace(addNamespace=namespace) + with namespaced(namespace, new=False): + nodes, group_node = self.create_vray_proxy( + name, filename=filename) + + self[:] = nodes + if not nodes: + return + + # colour the group node + project_name = context["project"]["name"] + settings = get_project_settings(project_name) + color = get_load_color_for_product_type(product_type, settings) + if color is not None: + red, green, blue = color + cmds.setAttr("{0}.useOutlinerColor".format(group_node), 1) + cmds.setAttr( + "{0}.outlinerColor".format(group_node), red, green, blue + ) + + return containerise( + name=name, + namespace=namespace, + nodes=nodes, + context=context, + loader=self.__class__.__name__) + + def update(self, container, context): + # type: (dict, dict) -> None + """Update container with specified representation.""" + node = container['objectName'] + assert cmds.objExists(node), "Missing container" + + members = cmds.sets(node, query=True) or [] + vraymeshes = cmds.ls(members, type="VRayProxy") + assert vraymeshes, "Cannot find VRayMesh in container" + + # get all representations for this version + repre_entity = context["representation"] + filename = self._get_abc( + context["project"]["name"], context["version"]["id"] + ) + if not filename: + filename = get_representation_path(repre_entity) + + for vray_mesh in vraymeshes: + cmds.setAttr("{}.fileName".format(vray_mesh), + filename, + type="string") + + # Update metadata + cmds.setAttr("{}.representation".format(node), + repre_entity["id"], + type="string") + + def remove(self, container): + # type: (dict) -> None + """Remove loaded container.""" + # Delete container and its contents + if cmds.objExists(container['objectName']): + members = cmds.sets(container['objectName'], query=True) or [] + cmds.delete([container['objectName']] + members) + + # Remove the namespace, if empty + namespace = container['namespace'] + if cmds.namespace(exists=namespace): + members = cmds.namespaceInfo(namespace, listNamespace=True) + if not members: + cmds.namespace(removeNamespace=namespace) + else: + self.log.warning("Namespace not deleted because it " + "still has members: %s", namespace) + + def switch(self, container, context): + # type: (dict, dict) -> None + """Switch loaded representation.""" + self.update(container, context) + + def create_vray_proxy(self, name, filename): + # type: (str, str) -> (list, str) + """Re-create the structure created by VRay to support vrmeshes + + Args: + name (str): Name of the asset. + filename (str): File name of vrmesh. 
+
+        Returns:
+            (list, str): Created nodes and the root transform.
+
+        """
+
+        if name is None:
+            name = os.path.splitext(os.path.basename(filename))[0]
+
+        parent = cmds.createNode("transform", name=name)
+        proxy = cmds.createNode(
+            "VRayProxy", name="{}Shape".format(name), parent=parent)
+        cmds.setAttr(proxy + ".fileName", filename, type="string")
+        cmds.connectAttr("time1.outTime", proxy + ".currentFrame")
+
+        return [parent, proxy], parent
+
+    def _get_abc(self, project_name, version_id):
+        # type: (str, str) -> str
+        """Get abc representation file path if present.
+
+        If there is an Alembic (abc) representation published along with
+        the vray proxy, get its file path.
+
+        Args:
+            project_name (str): Project name.
+            version_id (str): Version hash id.
+
+        Returns:
+            str: Path to the abc file, or an empty string if there is no
+                abc representation.
+
+        """
+        self.log.debug(
+            "Looking for abc in published representations of this version.")
+        abc_rep = ayon_api.get_representation_by_name(
+            project_name, "abc", version_id
+        )
+        if abc_rep:
+            self.log.debug("Found, we'll link alembic to vray proxy.")
+            file_name = get_representation_path(abc_rep)
+            self.log.debug("File: {}".format(file_name))
+            return file_name
+
+        return ""
diff --git a/client/ayon_maya/plugins/load/load_vrayscene.py b/client/ayon_maya/plugins/load/load_vrayscene.py
new file mode 100644
index 00000000..17b0c2cd
--- /dev/null
+++ b/client/ayon_maya/plugins/load/load_vrayscene.py
@@ -0,0 +1,154 @@
+# -*- coding: utf-8 -*-
+import maya.cmds as cmds  # noqa
+from ayon_core.settings import get_project_settings
+from ayon_core.pipeline import (
+    load,
+    get_representation_path
+)
+from ayon_maya.api.lib import (
+    maintained_selection,
+    namespaced,
+    unique_namespace
+)
+from ayon_maya.api.pipeline import containerise
+from ayon_maya.api.plugin import get_load_color_for_product_type
+
+
+class VRaySceneLoader(load.LoaderPlugin):
+    """Load Vray scene"""
+
+    product_types = {"vrayscene_layer"}
+    representations = {"vrscene"}
+
+    label = "Import VRay Scene"
+    order = -10
+    icon = "code-fork"
+    color = "orange"
+
+    def load(self, context, name, namespace, data):
+        product_type = context["product"]["productType"]
+
+        folder_name = context["folder"]["name"]
+        namespace = namespace or unique_namespace(
+            folder_name + "_",
+            prefix="_" if folder_name[0].isdigit() else "",
+            suffix="_",
+        )
+
+        # Ensure V-Ray for Maya is loaded.
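+        # `loadPlugin` raises a RuntimeError when the plug-in is missing;
+        # a more defensive variant (hypothetical sketch) could be:
+        #
+        #     try:
+        #         cmds.loadPlugin("vrayformaya", quiet=True)
+        #     except RuntimeError as exc:
+        #         self.log.error("V-Ray plug-in not available: %s" % exc)
+        #         return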
+ cmds.loadPlugin("vrayformaya", quiet=True) + + with maintained_selection(): + cmds.namespace(addNamespace=namespace) + with namespaced(namespace, new=False): + nodes, root_node = self.create_vray_scene( + name, + filename=self.filepath_from_context(context) + ) + + self[:] = nodes + if not nodes: + return + + # colour the group node + project_name = context["project"]["name"] + settings = get_project_settings(project_name) + color = get_load_color_for_product_type(product_type, settings) + if color is not None: + red, green, blue = color + cmds.setAttr("{0}.useOutlinerColor".format(root_node), 1) + cmds.setAttr( + "{0}.outlinerColor".format(root_node), red, green, blue + ) + + return containerise( + name=name, + namespace=namespace, + nodes=nodes, + context=context, + loader=self.__class__.__name__) + + def update(self, container, context): + + node = container['objectName'] + assert cmds.objExists(node), "Missing container" + + members = cmds.sets(node, query=True) or [] + vraymeshes = cmds.ls(members, type="VRayScene") + assert vraymeshes, "Cannot find VRayScene in container" + + repre_entity = context["representation"] + filename = get_representation_path(repre_entity) + + for vray_mesh in vraymeshes: + cmds.setAttr("{}.FilePath".format(vray_mesh), + filename, + type="string") + + # Update metadata + cmds.setAttr("{}.representation".format(node), + repre_entity["id"], + type="string") + + def remove(self, container): + + # Delete container and its contents + if cmds.objExists(container['objectName']): + members = cmds.sets(container['objectName'], query=True) or [] + cmds.delete([container['objectName']] + members) + + # Remove the namespace, if empty + namespace = container['namespace'] + if cmds.namespace(exists=namespace): + members = cmds.namespaceInfo(namespace, listNamespace=True) + if not members: + cmds.namespace(removeNamespace=namespace) + else: + self.log.warning("Namespace not deleted because it " + "still has members: %s", namespace) + + def switch(self, container, context): + self.update(container, context) + + def create_vray_scene(self, name, filename): + """Re-create the structure created by VRay to support vrscenes + + Args: + name(str): name of the asset + + Returns: + nodes(list) + """ + + # Create nodes + mesh_node_name = "VRayScene_{}".format(name) + + trans = cmds.createNode( + "transform", name=mesh_node_name) + vray_scene = cmds.createNode( + "VRayScene", name="{}_VRSCN".format(mesh_node_name), parent=trans) + mesh = cmds.createNode( + "mesh", name="{}_Shape".format(mesh_node_name), parent=trans) + + cmds.connectAttr( + "{}.outMesh".format(vray_scene), "{}.inMesh".format(mesh)) + + cmds.setAttr("{}.FilePath".format(vray_scene), filename, type="string") + + # Lock the shape nodes so the user cannot delete these + cmds.lockNode(mesh, lock=True) + cmds.lockNode(vray_scene, lock=True) + + # Create important connections + cmds.connectAttr("time1.outTime", + "{0}.inputTime".format(trans)) + + # Connect mesh to initialShadingGroup + cmds.sets([mesh], forceElement="initialShadingGroup") + + nodes = [trans, vray_scene, mesh] + + # Fix: Force refresh so the mesh shows correctly after creation + cmds.refresh() + + return nodes, trans diff --git a/client/ayon_maya/plugins/load/load_xgen.py b/client/ayon_maya/plugins/load/load_xgen.py new file mode 100644 index 00000000..f1f25640 --- /dev/null +++ b/client/ayon_maya/plugins/load/load_xgen.py @@ -0,0 +1,187 @@ +import os +import shutil + +import maya.cmds as cmds +import xgenm + +from qtpy import QtWidgets + +import 
ayon_maya.api.plugin
+from ayon_maya.api.lib import (
+    maintained_selection,
+    get_container_members,
+    attribute_values,
+    write_xgen_file
+)
+from ayon_maya.api import current_file
+from ayon_core.pipeline import get_representation_path
+
+
+class XgenLoader(ayon_maya.api.plugin.ReferenceLoader):
+    """Load Xgen as reference"""
+
+    product_types = {"xgen"}
+    representations = {"ma", "mb"}
+
+    label = "Reference Xgen"
+    icon = "code-fork"
+    color = "orange"
+
+    def get_xgen_xgd_paths(self, palette):
+        _, maya_extension = os.path.splitext(current_file())
+        xgen_file = current_file().replace(
+            maya_extension,
+            "__{}.xgen".format(palette.replace("|", "").replace(":", "__"))
+        )
+        xgd_file = xgen_file.replace(".xgen", ".xgd")
+        return xgen_file, xgd_file
+
+    def process_reference(self, context, name, namespace, options):
+        # Validate workfile has a path.
+        if current_file() is None:
+            QtWidgets.QMessageBox.warning(
+                None,
+                "",
+                "Current workfile has not been saved. Please save the workfile"
+                " before loading an Xgen."
+            )
+            return
+
+        maya_filepath = self.prepare_root_value(
+            file_url=self.filepath_from_context(context),
+            project_name=context["project"]["name"]
+        )
+
+        # Reference xgen. Xgen does not like being referenced under a group.
+        with maintained_selection():
+            nodes = cmds.file(
+                maya_filepath,
+                namespace=namespace,
+                sharedReferenceFile=False,
+                reference=True,
+                returnNewNodes=True
+            )
+
+        xgen_palette = cmds.ls(
+            nodes, type="xgmPalette", long=True
+        )[0].replace("|", "")
+
+        xgen_file, xgd_file = self.get_xgen_xgd_paths(xgen_palette)
+        self.set_palette_attributes(xgen_palette, xgen_file, xgd_file)
+
+        # Change the cache and disk values of xgDataPath and xgProjectPath
+        # to ensure paths are set up correctly.
+        project_path = os.path.dirname(current_file()).replace("\\", "/")
+        xgenm.setAttr("xgProjectPath", project_path, xgen_palette)
+        data_path = "${{PROJECT}}xgen/collections/{};{}".format(
+            xgen_palette.replace(":", "__ns__"),
+            xgenm.getAttr("xgDataPath", xgen_palette)
+        )
+        xgenm.setAttr("xgDataPath", data_path, xgen_palette)
+
+        data = {"xgProjectPath": project_path, "xgDataPath": data_path}
+        write_xgen_file(data, xgen_file)
+
+        # This creates a float expression attribute. If we did not add any
+        # changes to the collection, Xgen would not create an xgd file on
+        # save, which gives errors when launching the workfile again because
+        # it tries to find the missing xgd file.
+        name = "custom_float_ignore"
+        if name not in xgenm.customAttrs(xgen_palette):
+            xgenm.addCustomAttr(
+                "custom_float_ignore", xgen_palette
+            )
+
+        shapes = cmds.ls(nodes, shapes=True, long=True)
+
+        new_nodes = (list(set(nodes) - set(shapes)))
+
+        self[:] = new_nodes
+
+        return new_nodes
+
+    def set_palette_attributes(self, xgen_palette, xgen_file, xgd_file):
+        cmds.setAttr(
+            "{}.xgBaseFile".format(xgen_palette),
+            os.path.basename(xgen_file),
+            type="string"
+        )
+        cmds.setAttr(
+            "{}.xgFileName".format(xgen_palette),
+            os.path.basename(xgd_file),
+            type="string"
+        )
+        cmds.setAttr("{}.xgExportAsDelta".format(xgen_palette), True)
+
+    def update(self, container, context):
+        """Workflow for updating Xgen.
+
+        - Export changes to delta file.
+        - Copy and overwrite the workspace .xgen file.
+        - Set collection attributes to not include delta files.
+        - Update xgen maya file reference.
+        - Apply the delta file changes.
+        - Reset collection attributes to include delta files.
+
+        We have to use this workflow because, when referencing the xgen
+        collection, Maya implicitly imports the Xgen data from the xgen file,
+        so we don't have any control over when the delta file changes are
+        applied.
+
+        There is an implicit increment of the xgen and delta files, due to
+        using the workfile basename.
+        """
+        # Storing current description to try and maintain later.
+        current_description = (
+            xgenm.xgGlobal.DescriptionEditor.currentDescription()
+        )
+
+        container_node = container["objectName"]
+        members = get_container_members(container_node)
+        xgen_palette = cmds.ls(
+            members, type="xgmPalette", long=True
+        )[0].replace("|", "")
+        xgen_file, xgd_file = self.get_xgen_xgd_paths(xgen_palette)
+
+        # Export current changes to apply later.
+        xgenm.createDelta(xgen_palette.replace("|", ""), xgd_file)
+
+        self.set_palette_attributes(xgen_palette, xgen_file, xgd_file)
+
+        repre_entity = context["representation"]
+        maya_file = get_representation_path(repre_entity)
+        _, extension = os.path.splitext(maya_file)
+        new_xgen_file = maya_file.replace(extension, ".xgen")
+        data_path = ""
+        with open(new_xgen_file, "r") as f:
+            for line in f:
+                if line.startswith("\txgDataPath"):
+                    line = line.rstrip()
+                    data_path = line.split("\t")[-1]
+                    break
+
+        project_path = os.path.dirname(current_file()).replace("\\", "/")
+        data_path = "${{PROJECT}}xgen/collections/{};{}".format(
+            xgen_palette.replace(":", "__ns__"),
+            data_path
+        )
+        data = {"xgProjectPath": project_path, "xgDataPath": data_path}
+        shutil.copy(new_xgen_file, xgen_file)
+        write_xgen_file(data, xgen_file)
+
+        attribute_data = {
+            "{}.xgFileName".format(xgen_palette): os.path.basename(xgen_file),
+            "{}.xgBaseFile".format(xgen_palette): "",
+            "{}.xgExportAsDelta".format(xgen_palette): False
+        }
+        with attribute_values(attribute_data):
+            super().update(container, context)
+
+        xgenm.applyDelta(xgen_palette.replace("|", ""), xgd_file)
+
+        # Restore the currently selected description if it exists.
+        if cmds.objExists(current_description):
+            xgenm.xgGlobal.DescriptionEditor.setCurrentDescription(
+                current_description
+            )
+        # Full UI refresh.
+        xgenm.xgGlobal.DescriptionEditor.refresh("Full")
diff --git a/client/ayon_maya/plugins/load/load_yeti_cache.py b/client/ayon_maya/plugins/load/load_yeti_cache.py
new file mode 100644
index 00000000..bb7d2792
--- /dev/null
+++ b/client/ayon_maya/plugins/load/load_yeti_cache.py
@@ -0,0 +1,401 @@
+import os
+import json
+import re
+from collections import defaultdict
+
+import clique
+from maya import cmds
+
+from ayon_core.settings import get_project_settings
+from ayon_core.pipeline import (
+    load,
+    get_representation_path
+)
+from ayon_maya.api import lib
+from ayon_maya.api.yeti import create_yeti_variable
+from ayon_maya.api.pipeline import containerise
+from ayon_maya.api.plugin import get_load_color_for_product_type
+
+
+# Do not reset these values on update but only apply on first load
+# to preserve any potential local overrides
+SKIP_UPDATE_ATTRS = {
+    "displayOutput",
+    "viewportDensity",
+    "viewportWidth",
+    "viewportLength",
+    "renderDensity",
+    "renderWidth",
+    "renderLength",
+    "increaseRenderBounds"
+}
+
+SKIP_ATTR_MESSAGE = (
+    "Skipping updating %s.%s to %s because it "
+    "is considered a local overridable attribute. "
+    "Either set it manually or load the cache "
+    "anew."
+
+
+class YetiCacheLoader(load.LoaderPlugin):
+    """Load Yeti Cache with one or more Yeti nodes"""
+
+    product_types = {"yeticache", "yetiRig"}
+    representations = {"fur"}
+
+    label = "Load Yeti Cache"
+    order = -9
+    icon = "code-fork"
+    color = "orange"
+
+    def load(self, context, name=None, namespace=None, data=None):
+        """Loads a .fursettings file defining how to load .fur sequences
+
+        A single yeticache or yetiRig can have more than a single pgYetiMaya
+        node and thus load more than a single yeti .fur sequence.
+
+        The .fursettings file defines what the node names should be and also
+        what "cbId" attribute they should receive to match the original source
+        and allow published looks to also work for Yeti rigs and their caches.
+
+        """
+
+        product_type = context["product"]["productType"]
+
+        # Build namespace
+        folder_name = context["folder"]["name"]
+        if namespace is None:
+            namespace = self.create_namespace(folder_name)
+
+        # Ensure Yeti is loaded
+        if not cmds.pluginInfo("pgYetiMaya", query=True, loaded=True):
+            cmds.loadPlugin("pgYetiMaya", quiet=True)
+
+        # Create Yeti cache nodes according to settings
+        path = self.filepath_from_context(context)
+        settings = self.read_settings(path)
+        nodes = []
+        for node in settings["nodes"]:
+            nodes.extend(self.create_node(namespace, node))
+
+        group_name = "{}:{}".format(namespace, name)
+        group_node = cmds.group(nodes, name=group_name)
+        project_name = context["project"]["name"]
+
+        project_settings = get_project_settings(project_name)
+        color = get_load_color_for_product_type(
+            product_type, project_settings)
+        if color is not None:
+            red, green, blue = color
+            cmds.setAttr(group_node + ".useOutlinerColor", 1)
+            cmds.setAttr(group_node + ".outlinerColor", red, green, blue)
+
+        nodes.append(group_node)
+
+        self[:] = nodes
+
+        return containerise(
+            name=name,
+            namespace=namespace,
+            nodes=nodes,
+            context=context,
+            loader=self.__class__.__name__
+        )
+
+    def remove(self, container):
+
+        from maya import cmds
+
+        namespace = container["namespace"]
+        container_name = container["objectName"]
+
+        self.log.info("Removing '%s' from Maya.."
% container["name"]) + + container_content = cmds.sets(container_name, query=True) + nodes = cmds.ls(container_content, long=True) + + nodes.append(container_name) + + try: + cmds.delete(nodes) + except ValueError: + # Already implicitly deleted by Maya upon removing reference + pass + + cmds.namespace(removeNamespace=namespace, deleteNamespaceContent=True) + + def update(self, container, context): + repre_entity = context["representation"] + namespace = container["namespace"] + container_node = container["objectName"] + + path = get_representation_path(repre_entity) + settings = self.read_settings(path) + + # Collect scene information of asset + set_members = lib.get_container_members(container) + container_root = lib.get_container_transforms(container, + members=set_members, + root=True) + scene_nodes = cmds.ls(set_members, type="pgYetiMaya", long=True) + + # Build lookup with cbId as keys + scene_lookup = defaultdict(list) + for node in scene_nodes: + cb_id = lib.get_id(node) + scene_lookup[cb_id].append(node) + + # Re-assemble metadata with cbId as keys + meta_data_lookup = {n["cbId"]: n for n in settings["nodes"]} + + # Delete nodes by "cbId" that are not in the updated version + to_delete_lookup = {cb_id for cb_id in scene_lookup.keys() if + cb_id not in meta_data_lookup} + if to_delete_lookup: + + # Get nodes and remove entry from lookup + to_remove = [] + for _id in to_delete_lookup: + # Get all related nodes + shapes = scene_lookup[_id] + # Get the parents of all shapes under the ID + transforms = cmds.listRelatives(shapes, + parent=True, + fullPath=True) or [] + to_remove.extend(shapes + transforms) + + # Remove id from lookup + scene_lookup.pop(_id, None) + + cmds.delete(to_remove) + + for cb_id, node_settings in meta_data_lookup.items(): + + if cb_id not in scene_lookup: + # Create new nodes + self.log.info("Creating new nodes ..") + + new_nodes = self.create_node(namespace, node_settings) + cmds.sets(new_nodes, addElement=container_node) + cmds.parent(new_nodes, container_root) + + else: + # Update the matching nodes + scene_nodes = scene_lookup[cb_id] + lookup_result = meta_data_lookup[cb_id]["name"] + + # Remove namespace if any (e.g.: "character_01_:head_YNShape") + node_name = lookup_result.rsplit(":", 1)[-1] + + for scene_node in scene_nodes: + + # Get transform node, this makes renaming easier + transforms = cmds.listRelatives(scene_node, + parent=True, + fullPath=True) or [] + assert len(transforms) == 1, "This is a bug!" 
+
+                # Get the scene node's namespace and rename the transform node
+                lead = scene_node.rsplit(":", 1)[0]
+                namespace = ":{}".format(lead.rsplit("|")[-1])
+
+                new_shape_name = "{}:{}".format(namespace, node_name)
+                new_trans_name = new_shape_name.rsplit("Shape", 1)[0]
+
+                transform_node = transforms[0]
+                cmds.rename(transform_node,
+                            new_trans_name,
+                            ignoreShape=False)
+
+                # Get the newly named shape node
+                yeti_nodes = cmds.listRelatives(new_trans_name,
+                                                children=True)
+                yeti_node = yeti_nodes[0]
+
+                for attr, value in node_settings["attrs"].items():
+                    if attr in SKIP_UPDATE_ATTRS:
+                        self.log.info(
+                            SKIP_ATTR_MESSAGE, yeti_node, attr, value
+                        )
+                        continue
+                    set_attribute(attr, value, yeti_node)
+
+                # Set up user defined attributes
+                user_variables = node_settings.get("user_variables", {})
+                for attr, value in user_variables.items():
+                    was_value_set = create_yeti_variable(
+                        yeti_shape_node=yeti_node,
+                        attr_name=attr,
+                        value=value,
+                        # We do not want to update the value if it already
+                        # exists so that any local overrides that may have
+                        # been applied still persist
+                        force_value=False
+                    )
+                    if not was_value_set:
+                        self.log.info(
+                            SKIP_ATTR_MESSAGE, yeti_node, attr, value
+                        )
+
+        cmds.setAttr("{}.representation".format(container_node),
+                     repre_entity["id"],
+                     typ="string")
+
+    def switch(self, container, context):
+        self.update(container, context)
+
+    # helper functions
+    def create_namespace(self, folder_name):
+        """Create a unique namespace
+
+        Args:
+            folder_name (str): Folder name to base the namespace on.
+
+        Returns:
+            str: The generated unique namespace.
+
+        """
+
+        asset_name = "{}_".format(folder_name)
+        prefix = "_" if asset_name[0].isdigit() else ""
+        namespace = lib.unique_namespace(
+            asset_name,
+            prefix=prefix,
+            suffix="_"
+        )
+
+        return namespace
+
+    def get_cache_node_filepath(self, root, node_name):
+        """Get the cache file path for one of the yeti nodes.
+
+        All caches with more than one frame need the cache file name set with
+        a `%04d` frame placeholder. If the cache has only one frame we return
+        the file name as-is, as we assume it is a snapshot.
+
+        This expects the files to be named after the "node name" through
+        exports within Yeti.
+
+        Args:
+            root (str): Folder containing cache files to search in.
+            node_name (str): Node name to search cache files for.
+
+        Returns:
+            str: Cache file path value needed for the cacheFileName attribute.
+
+        """
+
+        name = node_name.replace(":", "_")
+        pattern = r"^({name})(\.[0-9]+)?(\.fur)$".format(name=re.escape(name))
+
+        files = [fname for fname in os.listdir(root)
+                 if re.match(pattern, fname)]
+        if not files:
+            self.log.error("Could not find cache files for '{}' "
+                           "with pattern {}".format(node_name, pattern))
+            return
+
+        if len(files) == 1:
+            # Single file
+            return os.path.join(root, files[0])
+
+        # Get filename for the sequence with padding
+        collections, remainder = clique.assemble(files)
+        assert not remainder, "This is a bug"
+        assert len(collections) == 1, "This is a bug"
+        collection = collections[0]
+
+        # Formats name as {head}%d{tail} like cache.%04d.fur
+        fname = collection.format("{head}{padding}{tail}")
+        return os.path.join(root, fname)
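+
+    @staticmethod
+    def _example_sequence_pattern():
+        """Editor's sketch, not part of the original diff.
+
+        Shows how clique (already imported above) turns an enumerated .fur
+        sequence into the single padded pattern that
+        get_cache_node_filepath() returns. File names are invented:
+
+            _example_sequence_pattern() -> "cache.%04d.fur"
+        """
+        collections, _ = clique.assemble(
+            ["cache.1001.fur", "cache.1002.fur", "cache.1003.fur"])
+        return collections[0].format("{head}{padding}{tail}")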
+
+    def create_node(self, namespace, node_settings):
+        """Create nodes with the correct namespace and settings
+
+        Args:
+            namespace (str): Namespace to create the nodes in.
+            node_settings (dict): Single "nodes" entry from a .fursettings
+                file.
+
+        Returns:
+            list: Created nodes
+
+        """
+        nodes = []
+
+        # Get original names and ids
+        orig_transform_name = node_settings["transform"]["name"]
+        orig_shape_name = node_settings["name"]
+
+        # Add namespace
+        transform_name = "{}:{}".format(namespace, orig_transform_name)
+        shape_name = "{}:{}".format(namespace, orig_shape_name)
+
+        # Create pgYetiMaya node
+        transform_node = cmds.createNode("transform",
+                                         name=transform_name)
+        yeti_node = cmds.createNode("pgYetiMaya",
+                                    name=shape_name,
+                                    parent=transform_node)
+
+        lib.set_id(transform_node, node_settings["transform"]["cbId"])
+        lib.set_id(yeti_node, node_settings["cbId"])
+
+        nodes.extend([transform_node, yeti_node])
+
+        # Update attributes with defaults
+        attributes = node_settings["attrs"]
+        attributes.update({
+            "verbosity": 2,
+            "fileMode": 1,
+
+            # Fix render stats, like Yeti's own
+            # ../scripts/pgYetiNode.mel script
+            "visibleInReflections": True,
+            "visibleInRefractions": True
+        })
+
+        if "viewportDensity" not in attributes:
+            attributes["viewportDensity"] = 0.1
+
+        # Apply attributes to the pgYetiMaya node
+        for attr, value in attributes.items():
+            set_attribute(attr, value, yeti_node)
+
+        # Set up user defined attributes
+        user_variables = node_settings.get("user_variables", {})
+        for attr, value in user_variables.items():
+            create_yeti_variable(yeti_shape_node=yeti_node,
+                                 attr_name=attr,
+                                 value=value)
+
+        # Connect to the time node
+        cmds.connectAttr("time1.outTime", "%s.currentTime" % yeti_node)
+
+        return nodes
+
+    def read_settings(self, path):
+        """Read the .fursettings file and compute additional attributes"""
+
+        with open(path, "r") as fp:
+            fur_settings = json.load(fp)
+
+        if "nodes" not in fur_settings:
+            raise RuntimeError("Encountered invalid data, "
+                               "expected 'nodes' in fursettings.")
+
+        # Compute the cache file name values we want to set for the nodes
+        root = os.path.dirname(path)
+        for node in fur_settings["nodes"]:
+            cache_filename = self.get_cache_node_filepath(
+                root=root, node_name=node["name"])
+
+            attrs = node.get("attrs", {})  # allow 'attrs' to not exist
+            attrs["cacheFileName"] = cache_filename
+            node["attrs"] = attrs
+
+        return fur_settings
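+
+
+if __name__ == "__main__":
+    # Editor's sketch of the minimal .fursettings shape that read_settings()
+    # expects; every name and id below is invented for illustration.
+    example = {
+        "nodes": [{
+            "name": "fur_YNShape",
+            "cbId": "hypothetical-folder-id:hypothetical-node-id",
+            "transform": {"name": "fur_YN",
+                          "cbId": "hypothetical-folder-id:other-id"},
+            "attrs": {"renderDensity": 1.0}
+        }]
+    }
+    print(json.dumps(example, indent=4))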
diff --git a/client/ayon_maya/plugins/load/load_yeti_rig.py b/client/ayon_maya/plugins/load/load_yeti_rig.py
new file mode 100644
index 00000000..41129b02
--- /dev/null
+++ b/client/ayon_maya/plugins/load/load_yeti_rig.py
@@ -0,0 +1,97 @@
+from typing import List
+
+import maya.cmds as cmds
+
+from ayon_maya.api import plugin
+from ayon_maya.api import lib
+
+from ayon_core.pipeline import registered_host
+from ayon_core.pipeline.create import CreateContext
+
+
+class YetiRigLoader(plugin.ReferenceLoader):
+    """This loader will load a Yeti rig."""
+
+    product_types = {"yetiRig"}
+    representations = {"ma"}
+
+    label = "Load Yeti Rig"
+    order = -9
+    icon = "code-fork"
+    color = "orange"
+
+    # From settings
+    create_cache_instance_on_load = True
+
+    def process_reference(
+        self, context, name=None, namespace=None, options=None
+    ):
+        path = self.filepath_from_context(context)
+
+        attach_to_root = options.get("attach_to_root", True)
+        group_name = options["group_name"]
+
+        # no group shall be created
+        if not attach_to_root:
+            group_name = namespace
+
+        with lib.maintained_selection():
+            file_url = self.prepare_root_value(
+                path, context["project"]["name"]
+            )
+            nodes = cmds.file(
+                file_url,
+                namespace=namespace,
+                reference=True,
+                returnNewNodes=True,
+                groupReference=attach_to_root,
+                groupName=group_name
+            )
+
+        color = plugin.get_load_color_for_product_type("yetiRig")
+        if color is not None:
+            red, green, blue = color
+            cmds.setAttr(group_name + ".useOutlinerColor", 1)
+            cmds.setAttr(
+                group_name + ".outlinerColor", red, green, blue
+            )
+        self[:] = nodes
+
+        if self.create_cache_instance_on_load:
+            # Automatically create an instance to allow publishing the
+            # loaded yeti rig into a yeti cache
+            self._create_yeti_cache_instance(nodes, variant=namespace)
+
+        return nodes
+
+    def _create_yeti_cache_instance(self, nodes: List[str], variant: str):
+        """Create a yeticache product type instance to publish the output.
+
+        This is similar to how loading an animation rig automatically creates
+        an animation instance for publishing any loaded character rigs, but
+        then for yeti rigs.
+
+        Args:
+            nodes (List[str]): Nodes generated on load.
+            variant (str): Variant for the yeti cache instance to create.
+
+        """
+
+        # Find the roots amongst the loaded nodes
+        yeti_nodes = cmds.ls(nodes, type="pgYetiMaya", long=True)
+        assert yeti_nodes, "No pgYetiMaya nodes in rig, this is a bug."
+
+        self.log.info("Creating variant: {}".format(variant))
+
+        creator_identifier = "io.openpype.creators.maya.yeticache"
+
+        host = registered_host()
+        create_context = CreateContext(host)
+
+        with lib.maintained_selection():
+            cmds.select(yeti_nodes, noExpand=True)
+            create_context.create(
+                creator_identifier=creator_identifier,
+                variant=variant,
+                pre_create_data={"use_selection": True}
+            )
diff --git a/client/ayon_maya/plugins/publish/__init__.py b/client/ayon_maya/plugins/publish/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/client/ayon_maya/plugins/publish/collect_animation.py b/client/ayon_maya/plugins/publish/collect_animation.py
new file mode 100644
index 00000000..391c80c8
--- /dev/null
+++ b/client/ayon_maya/plugins/publish/collect_animation.py
@@ -0,0 +1,60 @@
+import pyblish.api
+
+import maya.cmds as cmds
+
+
+class CollectAnimationOutputGeometry(pyblish.api.InstancePlugin):
+    """Collect out hierarchy data for instance.
+
+    Collect all hierarchy nodes which reside in the out_SET of the animation
+    instance or point cache instance. This is to unify the logic of retrieving
+    that specific data. This eliminates the need to write two separate pieces
+    of logic to fetch all hierarchy nodes.
+
+    Results in a list of nodes from the content of the instance.
+
+    """
+
+    order = pyblish.api.CollectorOrder + 0.4
+    families = ["animation"]
+    label = "Collect Animation Output Geometry"
+    hosts = ["maya"]
+
+    ignore_type = ["constraints"]
+
+    def process(self, instance):
+        """Collect the hierarchy nodes"""
+
+        product_type = instance.data["productType"]
+        out_set = next((i for i in instance.data["setMembers"] if
+                        i.endswith("out_SET")), None)
+
+        if out_set is None:
+            self.log.warning((
+                "Expecting out_SET for instance of product type '{}'"
+            ).format(product_type))
+            return
+
+        members = cmds.ls(cmds.sets(out_set, query=True), long=True)
+
+        # Get all the relatives of the members
+        descendants = cmds.listRelatives(members,
+                                         allDescendents=True,
+                                         fullPath=True) or []
+        descendants = cmds.ls(descendants, noIntermediate=True, long=True)
+
+        # Add members and descendants together for a complete overview
+
+        hierarchy = members + descendants
+
+        # Ignore certain node types (e.g.
constraints) + ignore = cmds.ls(hierarchy, type=self.ignore_type, long=True) + if ignore: + ignore = set(ignore) + hierarchy = [node for node in hierarchy if node not in ignore] + + # Store data in the instance for the validator + instance.data["out_hierarchy"] = hierarchy + + if instance.data.get("farm"): + instance.data["families"].append("publish.farm") diff --git a/client/ayon_maya/plugins/publish/collect_arnold_scene_source.py b/client/ayon_maya/plugins/publish/collect_arnold_scene_source.py new file mode 100644 index 00000000..cdeadd90 --- /dev/null +++ b/client/ayon_maya/plugins/publish/collect_arnold_scene_source.py @@ -0,0 +1,58 @@ +from maya import cmds + +import pyblish.api +from ayon_maya.api.lib import get_all_children + + +class CollectArnoldSceneSource(pyblish.api.InstancePlugin): + """Collect Arnold Scene Source data.""" + + # Offset to be after renderable camera collection. + order = pyblish.api.CollectorOrder + 0.2 + label = "Collect Arnold Scene Source" + families = ["ass", "assProxy"] + + def process(self, instance): + instance.data["members"] = [] + for set_member in instance.data["setMembers"]: + if cmds.nodeType(set_member) != "objectSet": + instance.data["members"].extend(self.get_hierarchy(set_member)) + continue + + members = cmds.sets(set_member, query=True) + members = cmds.ls(members, long=True) + if members is None: + self.log.warning( + "Skipped empty instance: \"%s\" " % set_member + ) + continue + if set_member.endswith("proxy_SET"): + instance.data["proxy"] = self.get_hierarchy(members) + + # Use camera in object set if present else default to render globals + # camera. + cameras = cmds.ls(type="camera", long=True) + renderable = [c for c in cameras if cmds.getAttr("%s.renderable" % c)] + if renderable: + camera = renderable[0] + for node in instance.data["members"]: + camera_shapes = cmds.listRelatives( + node, shapes=True, type="camera" + ) + if camera_shapes: + camera = node + instance.data["camera"] = camera + else: + self.log.debug("No renderable cameras found.") + + self.log.debug("data: {}".format(instance.data)) + + def get_hierarchy(self, nodes): + """Return nodes with all their children""" + nodes = cmds.ls(nodes, long=True) + if not nodes: + return [] + children = get_all_children(nodes) + # Make sure nodes merged with children only + # contains unique entries + return list(set(nodes + list(children))) diff --git a/client/ayon_maya/plugins/publish/collect_assembly.py b/client/ayon_maya/plugins/publish/collect_assembly.py new file mode 100644 index 00000000..affb22c5 --- /dev/null +++ b/client/ayon_maya/plugins/publish/collect_assembly.py @@ -0,0 +1,91 @@ +from collections import defaultdict +import pyblish.api + +from maya import cmds, mel +from ayon_maya import api +from ayon_maya.api import lib + +# TODO : Publish of assembly: -unique namespace for all assets, VALIDATOR! + + +class CollectAssembly(pyblish.api.InstancePlugin): + """Collect all relevant assembly items + + Collected data: + + * File name + * Compatible loader + * Matrix per instance + * Namespace + + Note: GPU caches are currently not supported in the pipeline. There is no + logic yet which supports the swapping of GPU cache to renderable objects. 
+ + """ + + order = pyblish.api.CollectorOrder + 0.49 + label = "Assembly" + families = ["assembly"] + + def process(self, instance): + + # Find containers + containers = api.ls() + + # Get all content from the instance + instance_lookup = set(cmds.ls(instance, type="transform", long=True)) + data = defaultdict(list) + + hierarchy_nodes = [] + for container in containers: + + root = lib.get_container_transforms(container, root=True) + if not root or root not in instance_lookup: + continue + + # Retrieve the hierarchy + parent = cmds.listRelatives(root, parent=True, fullPath=True)[0] + hierarchy_nodes.append(parent) + + # Temporary warning for GPU cache which are not supported yet + loader = container["loader"] + if loader == "GpuCacheLoader": + self.log.warning("GPU Cache Loader is currently not supported" + "in the pipeline, we will export it tho") + + # Gather info for new data entry + representation_id = container["representation"] + instance_data = {"loader": loader, + "parent": parent, + "namespace": container["namespace"]} + + # Check if matrix differs from default and store changes + matrix_data = self.get_matrix_data(root) + if matrix_data: + instance_data["matrix"] = matrix_data + + data[representation_id].append(instance_data) + + instance.data["scenedata"] = dict(data) + instance.data["nodesHierarchy"] = list(set(hierarchy_nodes)) + + def get_file_rule(self, rule): + return mel.eval('workspace -query -fileRuleEntry "{}"'.format(rule)) + + def get_matrix_data(self, node): + """Get the matrix of all members when they are not default + + Each matrix which differs from the default will be stored in a + dictionary + + Args: + members (list): list of transform nmodes + Returns: + dict + """ + + matrix = cmds.xform(node, query=True, matrix=True) + if matrix == lib.DEFAULT_MATRIX: + return + + return matrix diff --git a/client/ayon_maya/plugins/publish/collect_current_file.py b/client/ayon_maya/plugins/publish/collect_current_file.py new file mode 100644 index 00000000..c7105a7f --- /dev/null +++ b/client/ayon_maya/plugins/publish/collect_current_file.py @@ -0,0 +1,16 @@ + +import pyblish.api + +from maya import cmds + + +class CollectCurrentFile(pyblish.api.ContextPlugin): + """Inject the current working file.""" + + order = pyblish.api.CollectorOrder - 0.4 + label = "Maya Current File" + hosts = ['maya'] + + def process(self, context): + """Inject the current working file""" + context.data['currentFile'] = cmds.file(query=True, sceneName=True) diff --git a/client/ayon_maya/plugins/publish/collect_fbx_animation.py b/client/ayon_maya/plugins/publish/collect_fbx_animation.py new file mode 100644 index 00000000..d8fd7a16 --- /dev/null +++ b/client/ayon_maya/plugins/publish/collect_fbx_animation.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +from maya import cmds # noqa +import pyblish.api +from ayon_core.pipeline import OptionalPyblishPluginMixin + + +class CollectFbxAnimation(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): + """Collect Animated Rig Data for FBX Extractor.""" + + order = pyblish.api.CollectorOrder + 0.2 + label = "Collect Fbx Animation" + hosts = ["maya"] + families = ["animation"] + optional = True + + def process(self, instance): + if not self.is_active(instance.data): + return + skeleton_sets = [ + i for i in instance + if i.endswith("skeletonAnim_SET") + ] + if not skeleton_sets: + return + + instance.data["families"].append("animation.fbx") + instance.data["animated_skeleton"] = [] + for skeleton_set in skeleton_sets: + skeleton_content = 
cmds.sets(skeleton_set, query=True) + self.log.debug( + "Collected animated skeleton data: {}".format( + skeleton_content + )) + if skeleton_content: + instance.data["animated_skeleton"] = skeleton_content diff --git a/client/ayon_maya/plugins/publish/collect_fbx_camera.py b/client/ayon_maya/plugins/publish/collect_fbx_camera.py new file mode 100644 index 00000000..bfa5bccb --- /dev/null +++ b/client/ayon_maya/plugins/publish/collect_fbx_camera.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +from maya import cmds # noqa +import pyblish.api + + +class CollectFbxCamera(pyblish.api.InstancePlugin): + """Collect Camera for FBX export.""" + + order = pyblish.api.CollectorOrder + 0.2 + label = "Collect Camera for FBX export" + families = ["camera"] + + def process(self, instance): + if not instance.data.get("families"): + instance.data["families"] = [] + + if "fbx" not in instance.data["families"]: + instance.data["families"].append("fbx") + + instance.data["cameras"] = True diff --git a/client/ayon_maya/plugins/publish/collect_file_dependencies.py b/client/ayon_maya/plugins/publish/collect_file_dependencies.py new file mode 100644 index 00000000..60853bd1 --- /dev/null +++ b/client/ayon_maya/plugins/publish/collect_file_dependencies.py @@ -0,0 +1,32 @@ +from maya import cmds + +import pyblish.api + + +class CollectFileDependencies(pyblish.api.ContextPlugin): + """Gather all files referenced in this scene.""" + + label = "Collect File Dependencies" + order = pyblish.api.CollectorOrder - 0.49 + hosts = ["maya"] + families = ["renderlayer"] + + @classmethod + def apply_settings(cls, project_settings): + # Disable plug-in if not used for deadline submission anyway + settings = project_settings["deadline"]["publish"]["MayaSubmitDeadline"] # noqa + cls.enabled = settings.get("asset_dependencies", True) + + def process(self, context): + dependencies = set() + for node in cmds.ls(type="file"): + path = cmds.getAttr("{}.{}".format(node, "fileTextureName")) + if path not in dependencies: + dependencies.add(path) + + for node in cmds.ls(type="AlembicNode"): + path = cmds.getAttr("{}.{}".format(node, "abc_File")) + if path not in dependencies: + dependencies.add(path) + + context.data["fileDependencies"] = list(dependencies) diff --git a/client/ayon_maya/plugins/publish/collect_gltf.py b/client/ayon_maya/plugins/publish/collect_gltf.py new file mode 100644 index 00000000..bb37fe3a --- /dev/null +++ b/client/ayon_maya/plugins/publish/collect_gltf.py @@ -0,0 +1,17 @@ +# -*- coding: utf-8 -*- +import pyblish.api + + +class CollectGLTF(pyblish.api.InstancePlugin): + """Collect Assets for GLTF/GLB export.""" + + order = pyblish.api.CollectorOrder + 0.2 + label = "Collect Asset for GLTF/GLB export" + families = ["model", "animation", "pointcache"] + + def process(self, instance): + if not instance.data.get("families"): + instance.data["families"] = [] + + if "gltf" not in instance.data["families"]: + instance.data["families"].append("gltf") diff --git a/client/ayon_maya/plugins/publish/collect_history.py b/client/ayon_maya/plugins/publish/collect_history.py new file mode 100644 index 00000000..2da74991 --- /dev/null +++ b/client/ayon_maya/plugins/publish/collect_history.py @@ -0,0 +1,46 @@ +from maya import cmds + +import pyblish.api + + +class CollectMayaHistory(pyblish.api.InstancePlugin): + """Collect history for instances from the Maya scene + + Note: + This removes render layers collected in the history + + This is separate from Collect Instances so we can target it towards only + specific product types. 
+ + """ + + order = pyblish.api.CollectorOrder + 0.1 + hosts = ["maya"] + label = "Maya History" + families = ["rig"] + + def process(self, instance): + + kwargs = {} + if int(cmds.about(version=True)) >= 2020: + # New flag since Maya 2020 which makes cmds.listHistory faster + kwargs = {"fastIteration": True} + else: + self.log.debug("Ignoring `fastIteration` flag before Maya 2020..") + + # Collect the history with long names + history = set(cmds.listHistory(instance, leaf=False, **kwargs) or []) + history = cmds.ls(list(history), long=True) + + # Exclude invalid nodes (like renderlayers) + exclude = cmds.ls(type="renderLayer", long=True) + if exclude: + exclude = set(exclude) # optimize lookup + history = [x for x in history if x not in exclude] + + # Combine members with history + members = instance[:] + history + members = list(set(members)) # ensure unique + + # Update the instance + instance[:] = members diff --git a/client/ayon_maya/plugins/publish/collect_inputs.py b/client/ayon_maya/plugins/publish/collect_inputs.py new file mode 100644 index 00000000..28788ac5 --- /dev/null +++ b/client/ayon_maya/plugins/publish/collect_inputs.py @@ -0,0 +1,213 @@ +import copy + +from maya import cmds +import maya.api.OpenMaya as om +import pyblish.api + +from ayon_core.pipeline import registered_host +from ayon_maya.api.lib import get_container_members +from ayon_maya.api.lib_rendersetup import get_shader_in_layer + + +def iter_history(nodes, + filter=om.MFn.kInvalid, + direction=om.MItDependencyGraph.kUpstream): + """Iterate unique upstream history for list of nodes. + + This acts as a replacement to maya.cmds.listHistory. + It's faster by about 2x-3x. It returns less than + maya.cmds.listHistory as it excludes the input nodes + from the output (unless an input node was history + for another input node). It also excludes duplicates. + + Args: + nodes (list): Maya node names to start search from. + filter (om.MFn.Type): Filter to only specific types. + e.g. to dag nodes using om.MFn.kDagNode + direction (om.MItDependencyGraph.Direction): Direction to traverse in. + Defaults to upstream. + + Yields: + str: Node names in upstream history. + + """ + if not nodes: + return + + sel = om.MSelectionList() + for node in nodes: + sel.add(node) + + it = om.MItDependencyGraph(sel.getDependNode(0)) # init iterator + handle = om.MObjectHandle + + traversed = set() + fn_dep = om.MFnDependencyNode() + fn_dag = om.MFnDagNode() + for i in range(sel.length()): + + start_node = sel.getDependNode(i) + start_node_hash = handle(start_node).hashCode() + if start_node_hash in traversed: + continue + + it.resetTo(start_node, + filter=filter, + direction=direction) + while not it.isDone(): + + node = it.currentNode() + node_hash = handle(node).hashCode() + + if node_hash in traversed: + it.prune() + it.next() # noqa: B305 + continue + + traversed.add(node_hash) + + if node.hasFn(om.MFn.kDagNode): + fn_dag.setObject(node) + yield fn_dag.fullPathName() + else: + fn_dep.setObject(node) + yield fn_dep.name() + + it.next() # noqa: B305 + + +def collect_input_containers(containers, nodes): + """Collect containers that contain any of the node in `nodes`. + + This will return any loaded AYON container that contains at least one of + the nodes. As such, the AYON container is an input for it. Or in short, + there are member nodes of that container. + + Returns: + list: Input loaded containers + + """ + # Assume the containers have collected their cached '_members' data + # in the collector. 
+ return [container for container in containers + if any(node in container["_members"] for node in nodes)] + + +class CollectUpstreamInputs(pyblish.api.InstancePlugin): + """Collect input source inputs for this publish. + + This will include `inputs` data of which loaded publishes were used in the + generation of this publish. This leaves an upstream trace to what was used + as input. + + """ + + label = "Collect Inputs" + order = pyblish.api.CollectorOrder + 0.34 + hosts = ["maya"] + + def process(self, instance): + + # For large scenes the querying of "host.ls()" can be relatively slow + # e.g. up to a second. Many instances calling it easily slows this + # down. As such, we cache it so we trigger it only once. + # todo: Instead of hidden cache make "CollectContainers" plug-in + cache_key = "__cache_containers" + scene_containers = instance.context.data.get(cache_key, None) + if scene_containers is None: + # Query the scenes' containers if there's no cache yet + host = registered_host() + scene_containers = list(host.ls()) + for container in scene_containers: + # Embed the members into the container dictionary + container_members = set(get_container_members(container)) + container["_members"] = container_members + instance.context.data["__cache_containers"] = scene_containers + + # Collect the relevant input containers for this instance + if "renderlayer" in set(instance.data.get("families", [])): + # Special behavior for renderlayers + self.log.debug("Collecting renderlayer inputs....") + containers = self._collect_renderlayer_inputs(scene_containers, + instance) + + else: + # Basic behavior + nodes = instance[:] + + # Include any input connections of history with long names + # For optimization purposes only trace upstream from shape nodes + # looking for used dag nodes. This way having just a constraint + # on a transform is also ignored which tended to give irrelevant + # inputs for the majority of our use cases. We tend to care more + # about geometry inputs. + shapes = cmds.ls(nodes, + type=("mesh", "nurbsSurface", "nurbsCurve"), + noIntermediate=True) + if shapes: + history = list(iter_history(shapes, filter=om.MFn.kShape)) + history = cmds.ls(history, long=True) + + # Include the transforms in the collected history as shapes + # are excluded from containers + transforms = cmds.listRelatives(cmds.ls(history, shapes=True), + parent=True, + fullPath=True, + type="transform") + if transforms: + history.extend(transforms) + + if history: + nodes = list(set(nodes + history)) + + # Collect containers for the given set of nodes + containers = collect_input_containers(scene_containers, + nodes) + + inputs = [c["representation"] for c in containers] + instance.data["inputRepresentations"] = inputs + self.log.debug("Collected inputs: %s" % inputs) + + def _collect_renderlayer_inputs(self, scene_containers, instance): + """Collects inputs from nodes in renderlayer, incl. 
shaders + camera""" + + # Get the renderlayer + renderlayer = instance.data.get("setMembers") + + if renderlayer == "defaultRenderLayer": + # Assume all loaded containers in the scene are inputs + # for the masterlayer + return copy.deepcopy(scene_containers) + else: + # Get the members of the layer + members = cmds.editRenderLayerMembers(renderlayer, + query=True, + fullNames=True) or [] + + # In some cases invalid objects are returned from + # `editRenderLayerMembers` so we filter them out + members = cmds.ls(members, long=True) + + # Include all children + children = cmds.listRelatives(members, + allDescendents=True, + fullPath=True) or [] + members.extend(children) + + # Include assigned shaders in renderlayer + shapes = cmds.ls(members, shapes=True, long=True) + shaders = set() + for shape in shapes: + shape_shaders = get_shader_in_layer(shape, layer=renderlayer) + if not shape_shaders: + continue + shaders.update(shape_shaders) + members.extend(shaders) + + # Explicitly include the camera being rendered in renderlayer + cameras = instance.data.get("cameras") + members.extend(cameras) + + containers = collect_input_containers(scene_containers, members) + + return containers diff --git a/client/ayon_maya/plugins/publish/collect_instances.py b/client/ayon_maya/plugins/publish/collect_instances.py new file mode 100644 index 00000000..758b977c --- /dev/null +++ b/client/ayon_maya/plugins/publish/collect_instances.py @@ -0,0 +1,114 @@ +from maya import cmds + +import pyblish.api +from ayon_maya.api.lib import get_all_children + + +class CollectNewInstances(pyblish.api.InstancePlugin): + """Gather members for instances and pre-defined attribute + + This collector takes into account assets that are associated with + an objectSet and marked with a unique identifier; + + Identifier: + id (str): "ayon.create.instance" + + Limitations: + - Does not take into account nodes connected to those + within an objectSet. Extractors are assumed to export + with history preserved, but this limits what they will + be able to achieve and the amount of data available + to validators. An additional collector could also + append this input data into the instance, as we do + for `pype.rig` with collect_history. 
+ + """ + + label = "Collect New Instance Data" + order = pyblish.api.CollectorOrder + hosts = ["maya"] + + valid_empty_product_types = {"workfile", "renderlayer"} + + def process(self, instance): + + objset = instance.data.get("instance_node") + if not objset: + self.log.debug("Instance has no `instance_node` data") + + # TODO: We might not want to do this in the future + # Merge creator attributes into instance.data just backwards compatible + # code still runs as expected + creator_attributes = instance.data.get("creator_attributes", {}) + if creator_attributes: + instance.data.update(creator_attributes) + + members = cmds.sets(objset, query=True) or [] + if members: + # Collect members + members = cmds.ls(members, long=True) or [] + + # Collect full hierarchy + dag_members = cmds.ls(members, type="dagNode", long=True) + children = get_all_children(dag_members, + ignore_intermediate_objects=True) + + members_hierarchy = set(members) + members_hierarchy.update(children) + if creator_attributes.get("includeParentHierarchy", True): + members_hierarchy.update(self.get_all_parents(dag_members)) + + instance[:] = members_hierarchy + + elif ( + instance.data["productType"] not in self.valid_empty_product_types + ): + self.log.warning("Empty instance: \"%s\" " % objset) + # Store the exact members of the object set + instance.data["setMembers"] = members + + # TODO: This might make more sense as a separate collector + # Convert frame values to integers + for attr_name in ( + "handleStart", "handleEnd", "frameStart", "frameEnd", + ): + value = instance.data.get(attr_name) + if value is not None: + instance.data[attr_name] = int(value) + + # Append start frame and end frame to label if present + if "frameStart" in instance.data and "frameEnd" in instance.data: + # Take handles from context if not set locally on the instance + for key in ["handleStart", "handleEnd"]: + if key not in instance.data: + value = instance.context.data[key] + if value is not None: + value = int(value) + instance.data[key] = value + + instance.data["frameStartHandle"] = int( + instance.data["frameStart"] - instance.data["handleStart"] + ) + instance.data["frameEndHandle"] = int( + instance.data["frameEnd"] + instance.data["handleEnd"] + ) + + def get_all_parents(self, nodes): + """Get all parents by using string operations (optimization) + + Args: + nodes (iterable): the nodes which are found in the objectSet + + Returns: + set + """ + + parents = set() + for node in nodes: + split_parts = node.split("|") + items = [ + "|".join(split_parts[:i]) for i in range(2, len(split_parts)) + ] + parents.update(items) + + return parents diff --git a/client/ayon_maya/plugins/publish/collect_look.py b/client/ayon_maya/plugins/publish/collect_look.py new file mode 100644 index 00000000..f4bc2650 --- /dev/null +++ b/client/ayon_maya/plugins/publish/collect_look.py @@ -0,0 +1,675 @@ +# -*- coding: utf-8 -*- +"""Maya look collector.""" +import re +import os +import glob + +from maya import cmds # noqa +import pyblish.api +from ayon_maya.api import lib + +SHAPE_ATTRS = {"castsShadows", + "receiveShadows", + "motionBlur", + "primaryVisibility", + "smoothShading", + "visibleInReflections", + "visibleInRefractions", + "doubleSided", + "opposite"} + + +def get_pxr_multitexture_file_attrs(node): + attrs = [] + for i in range(9): + if cmds.attributeQuery("filename{}".format(i), node=node, ex=True): + file = cmds.getAttr("{}.filename{}".format(node, i)) + if file: + attrs.append("filename{}".format(i)) + return attrs + + +FILE_NODES = { + # maya 
+ "file": "fileTextureName", + # arnold (mtoa) + "aiImage": "filename", + # redshift + "RedshiftNormalMap": "tex0", + # renderman + "PxrBump": "filename", + "PxrNormalMap": "filename", + "PxrMultiTexture": get_pxr_multitexture_file_attrs, + "PxrPtexture": "filename", + "PxrTexture": "filename" +} + +RENDER_SET_TYPES = [ + "VRayDisplacement", + "VRayLightMesh", + "VRayObjectProperties", + "RedshiftObjectId", + "RedshiftMeshParameters", +] + +# Keep only node types that actually exist +all_node_types = set(cmds.allNodeTypes()) +for node_type in list(FILE_NODES.keys()): + if node_type not in all_node_types: + FILE_NODES.pop(node_type) + +RENDER_SET_TYPES = [node_type for node_type in RENDER_SET_TYPES + if node_type in all_node_types] +del all_node_types + +# Cache pixar dependency node types so we can perform a type lookup against it +PXR_NODES = set() +if cmds.pluginInfo("RenderMan_for_Maya", query=True, loaded=True): + PXR_NODES = set( + cmds.pluginInfo("RenderMan_for_Maya", + query=True, + dependNode=True) + ) + + +def get_attributes(dictionary, attr, node=None): + # type: (dict, str, str) -> list + if callable(dictionary[attr]): + val = dictionary[attr](node) + else: + val = dictionary.get(attr, []) + + return val if isinstance(val, list) else [val] + + +def get_look_attrs(node): + """Returns attributes of a node that are important for the look. + + These are the "changed" attributes (those that have edits applied + in the current scene). + + Returns: + list: Attribute names to extract + + """ + # When referenced get only attributes that are "changed since file open" + # which includes any reference edits, otherwise take *all* user defined + # attributes + is_referenced = cmds.referenceQuery(node, isNodeReferenced=True) + result = cmds.listAttr(node, userDefined=True, + changedSinceFileOpen=is_referenced) or [] + + # `cbId` is added when a scene is saved, ignore by default + if "cbId" in result: + result.remove("cbId") + + # For shapes allow render stat changes + if cmds.objectType(node, isAType="shape"): + attrs = cmds.listAttr(node, changedSinceFileOpen=True) or [] + for attr in attrs: + if attr in SHAPE_ATTRS or attr.startswith('ai'): + result.append(attr) + return result + + +def node_uses_image_sequence(node, node_path): + # type: (str, str) -> bool + """Return whether file node uses an image sequence or single image. + + Determine if a node uses an image sequence or just a single image, + not always obvious from its file path alone. + + Args: + node (str): Name of the Maya node + node_path (str): The file path of the node + + Returns: + bool: True if node uses an image sequence + + """ + + # useFrameExtension indicates an explicit image sequence + try: + use_frame_extension = cmds.getAttr('%s.useFrameExtension' % node) + except ValueError: + use_frame_extension = False + if use_frame_extension: + return True + + # The following tokens imply a sequence + patterns = ["", "", "", + "u_v", ""] + node_path_lowered = node_path.lower() + return any(pattern in node_path_lowered for pattern in patterns) + + +def seq_to_glob(path): + """Takes an image sequence path and returns it in glob format, + with the frame number replaced by a '*'. + + Image sequences may be numerical sequences, e.g. /path/to/file.1001.exr + will return as /path/to/file.*.exr. + + Image sequences may also use tokens to denote sequences, e.g. + /path/to/texture..tif will return as /path/to/texture.*.tif. 
+
+
+def seq_to_glob(path):
+    """Takes an image sequence path and returns it in glob format,
+    with the frame number replaced by a '*'.
+
+    Image sequences may be numerical sequences, e.g. /path/to/file.1001.exr
+    will return as /path/to/file.*.exr.
+
+    Image sequences may also use tokens to denote sequences, e.g.
+    /path/to/texture.<UDIM>.tif will return as /path/to/texture.*.tif.
+
+    Args:
+        path (str): the image sequence path
+
+    Returns:
+        str: Return glob string that matches the filename pattern.
+
+    """
+
+    if path is None:
+        return path
+
+    # If any of the patterns, convert the pattern
+    patterns = {
+        "<udim>": "<udim>",
+        "<tile>": "<tile>",
+        "<uvtile>": "<uvtile>",
+        "#": "#",
+        "u<u>_v<v>": "<u>|<v>",
+        "<frame0": "<frame0\\d+>",
+        "<f>": "<f>"
+    }
+
+    lower = path.lower()
+    has_pattern = False
+    for pattern, regex_pattern in patterns.items():
+        if pattern in lower:
+            path = re.sub(regex_pattern, "*", path, flags=re.IGNORECASE)
+            has_pattern = True
+
+    if has_pattern:
+        return path
+
+    base = os.path.basename(path)
+    matches = list(re.finditer(r'\d+', base))
+    if matches:
+        match = matches[-1]
+        new_base = '{0}*{1}'.format(base[:match.start()],
+                                    base[match.end():])
+        head = os.path.dirname(path)
+        return os.path.join(head, new_base)
+    else:
+        return path
+
+
+def get_file_node_paths(node):
+    # type: (str) -> list
+    """Get the file path used by a Maya file node.
+
+    Args:
+        node (str): Name of the Maya file node
+
+    Returns:
+        list: the file paths in use
+
+    """
+    # if the path appears to be sequence, use computedFileTextureNamePattern,
+    # this preserves the <> tag
+    if cmds.attributeQuery('computedFileTextureNamePattern',
+                           node=node,
+                           exists=True):
+        plug = '{0}.computedFileTextureNamePattern'.format(node)
+        texture_pattern = cmds.getAttr(plug)
+
+        patterns = ["<udim>",
+                    "<tile>",
+                    "u<u>_v<v>",
+                    "<f>",
+                    "<frame>"]
+        lower = texture_pattern.lower()
+        if any(pattern in lower for pattern in patterns):
+            return [texture_pattern]
+
+    try:
+        file_attributes = get_attributes(
+            FILE_NODES, cmds.nodeType(node), node)
+    except AttributeError:
+        file_attributes = "fileTextureName"
+
+    files = []
+    for file_attr in file_attributes:
+        if cmds.attributeQuery(file_attr, node=node, exists=True):
+            files.append(cmds.getAttr("{}.{}".format(node, file_attr)))
+
+    return files
+
+
+def get_file_node_files(node):
+    """Return the file paths related to the file node
+
+    Note:
+        Will only return existing files. Returns an empty list
+        if no valid existing files are linked.
+
+    Returns:
+        list: List of full file paths.
+
+    """
+    paths = get_file_node_paths(node)
+
+    # For sequences get all files and filter to only existing files
+    result = []
+    for path in paths:
+        if node_uses_image_sequence(node, path):
+            glob_pattern = seq_to_glob(path)
+            result.extend(glob.glob(glob_pattern))
+        elif os.path.exists(path):
+            result.append(path)
+
+    return result
+
+
+class CollectLook(pyblish.api.InstancePlugin):
+    """Collect look data for instance.
+
+    For the shapes/transforms of the referenced object to collect the look
+    for, retrieve the user-defined attributes (like V-Ray attributes) and
+    their values as they were created in the current scene.
+
+    For the members of the instance collect the sets (shadingEngines and
+    other sets, e.g. VRayDisplacement) they are in along with the exact
+    membership relations.
+
+    Collects:
+        lookAttributes (list): Nodes in instance with their altered attributes
+        lookSetRelations (list): Sets and their memberships
+        lookSets (list): List of set names included in the look
+
+    """
+
+    order = pyblish.api.CollectorOrder + 0.2
+    families = ["look"]
+    label = "Collect Look"
+    hosts = ["maya"]
+
+    def process(self, instance):
+        """Collect the Look in the instance with the correct layer settings"""
+        renderlayer = instance.data.get("renderlayer", "defaultRenderLayer")
+        with lib.renderlayer(renderlayer):
+            self.collect(instance)
+
+    def collect(self, instance):
+        """Collect looks.
+
+        Args:
+            instance (pyblish.api.Instance): Instance to collect.
+ + """ + self.log.debug("Looking for look associations " + "for %s" % instance.data['name']) + + # Discover related object sets + self.log.debug("Gathering sets ...") + sets = self.collect_sets(instance) + + # Lookup set (optimization) + instance_lookup = set(cmds.ls(instance, long=True)) + + self.log.debug("Gathering set relations ...") + # Ensure iteration happen in a list to allow removing keys from the + # dict within the loop + for obj_set in list(sets): + self.log.debug("From {}".format(obj_set)) + # Get all nodes of the current objectSet (shadingEngine) + for member in cmds.ls(cmds.sets(obj_set, query=True), long=True): + member_data = self.collect_member_data(member, + instance_lookup) + if member_data: + # Add information of the node to the members list + sets[obj_set]["members"].append(member_data) + + # Remove sets that didn't have any members assigned in the end + # Thus the data will be limited to only what we need. + if not sets[obj_set]["members"]: + self.log.debug( + "Removing redundant set information: {}".format(obj_set) + ) + sets.pop(obj_set, None) + + self.log.debug("Gathering attribute changes to instance members..") + attributes = self.collect_attributes_changed(instance) + + # Store data on the instance + instance.data["lookData"] = { + "attributes": attributes, + "relationships": sets + } + + # Collect file nodes used by shading engines (if we have any) + files = [] + look_sets = list(sets.keys()) + if look_sets: + self.log.debug("Found look sets: {}".format(look_sets)) + files = self.collect_file_nodes(look_sets) + + self.log.debug("Collected file nodes:\n{}".format(files)) + + # Collect texture resources if any file nodes are found + resources = [] + for node in files: + resources.extend(self.collect_resources(node)) + instance.data["resources"] = resources + self.log.debug("Collected resources: {}".format(resources)) + + # Log warning when no relevant sets were retrieved for the look. + if ( + not instance.data["lookData"]["relationships"] + and "model" not in self.families + ): + self.log.warning("No sets found for the nodes in the " + "instance: %s" % instance[:]) + + # Ensure unique shader sets + # Add shader sets to the instance for unify ID validation + instance.extend(shader for shader in look_sets if shader + not in instance_lookup) + + self.log.debug("Collected look for %s" % instance) + + def collect_file_nodes(self, look_sets): + """Get the entire node chain of the look sets and return file nodes + + Arguments: + look_sets (List[str]): List of sets and shading engines relevant + to the look. + + Returns: + List[str]: List of file node names. 
+ + """ + + shader_attrs = [ + "surfaceShader", + "volumeShader", + "displacementShader", + "aiSurfaceShader", + "aiVolumeShader", + "rman__surface", + "rman__displacement" + ] + + # Get all material attrs for all look sets to retrieve their inputs + existing_attrs = [] + for look_set in look_sets: + for attr in shader_attrs: + if cmds.attributeQuery(attr, node=look_set, exists=True): + existing_attrs.append("{}.{}".format(look_set, attr)) + + materials = cmds.listConnections(existing_attrs, + source=True, + destination=False) or [] + + self.log.debug("Found materials:\n{}".format(materials)) + + # Get the entire node chain of the look sets + # history = cmds.listHistory(look_sets, allConnections=True) + # if materials list is empty, listHistory() will crash with + # RuntimeError + history = set() + if materials: + history.update(cmds.listHistory(materials, allConnections=True)) + + # Since we retrieved history only of the connected materials connected + # to the look sets above we now add direct history for some of the + # look sets directly handling render attribute sets + + # Maya (at least 2024) crashes with Warning when render set type + # isn't available. cmds.ls() will return empty list + if RENDER_SET_TYPES: + render_sets = cmds.ls(look_sets, type=RENDER_SET_TYPES) + if render_sets: + history.update( + cmds.listHistory(render_sets, + future=False, + pruneDagObjects=True) + or [] + ) + + # Get file nodes in the material history + files = cmds.ls(list(history), + # It's important only node types are passed that + # exist (e.g. for loaded plugins) because otherwise + # the result will turn back empty + type=list(FILE_NODES.keys()), + long=True) + + # Sort for log readability + files.sort() + + return files + + def collect_sets(self, instance): + """Collect all objectSets which are of importance for publishing + + It checks if all nodes in the instance are related to any objectSet + which need to be + + Args: + instance (pyblish.api.Instance): publish instance containing all + nodes to be published. + + Returns: + dict + """ + + sets = {} + for node in instance: + related_sets = lib.get_related_sets(node) + if not related_sets: + continue + + for objset in related_sets: + if objset in sets: + continue + + sets[objset] = {"uuid": lib.get_id(objset), "members": list()} + + return sets + + def collect_member_data(self, member, instance_members): + """Get all information of the node + Args: + member (str): the name of the node to check + instance_members (set): the collected instance members + + Returns: + dict + + """ + + node, components = (member.rsplit(".", 1) + [None])[:2] + + # Only include valid members of the instance + if node not in instance_members: + return + + node_id = lib.get_id(node) + if not node_id: + self.log.error("Member '{}' has no attribute 'cbId'".format(node)) + return + + member_data = {"name": node, "uuid": node_id} + if components: + member_data["components"] = components + + return member_data + + def collect_attributes_changed(self, instance): + """Collect all userDefined attributes which have changed + + Each node gets checked for user defined attributes which have been + altered during development. 
Each change gets logged in a dictionary:
+
+            [{name: node,
+              uuid: uuid,
+              attributes: {attribute: value}}]
+
+        Args:
+            instance (list): all nodes which will be published
+
+        Returns:
+            list
+        """
+
+        attributes = []
+        for node in instance:
+
+            # Collect changes to "custom" attributes
+            node_attrs = get_look_attrs(node)
+
+            # Only include if there are any properties we care about
+            if not node_attrs:
+                continue
+
+            self.log.debug(
+                "Node \"{0}\" attributes: {1}".format(node, node_attrs)
+            )
+
+            node_attributes = {}
+            for attr in node_attrs:
+                if not cmds.attributeQuery(attr, node=node, exists=True):
+                    continue
+                attribute = "{}.{}".format(node, attr)
+                # We don't support mixed-type attributes yet.
+                if cmds.attributeQuery(attr, node=node, multi=True):
+                    self.log.warning("Attribute '{}' is mixed-type and is "
+                                     "not supported yet.".format(attribute))
+                    continue
+                if cmds.getAttr(attribute, type=True) == "message":
+                    continue
+                node_attributes[attr] = cmds.getAttr(attribute, asString=True)
+            # Only include if there are any properties we care about
+            if not node_attributes:
+                continue
+            attributes.append({"name": node,
+                               "uuid": lib.get_id(node),
+                               "attributes": node_attributes})
+
+        return attributes
+
+    def collect_resources(self, node):
+        """Collect the link to the file(s) used (resource)
+
+        Args:
+            node (str): name of the node
+
+        Yields:
+            dict: Resource data per file attribute of the node.
+        """
+        if cmds.nodeType(node) not in FILE_NODES:
+            self.log.error(
+                "Unsupported file node: {}".format(cmds.nodeType(node)))
+            raise AssertionError("Unsupported file node")
+
+        self.log.debug(
+            "Collecting resource: {} ({})".format(node, cmds.nodeType(node))
+        )
+
+        attributes = get_attributes(FILE_NODES, cmds.nodeType(node), node)
+        for attribute in attributes:
+            source = cmds.getAttr("{}.{}".format(
+                node,
+                attribute
+            ))
+
+            self.log.debug("  - file source: {}".format(source))
+            color_space_attr = "{}.colorSpace".format(node)
+            try:
+                color_space = cmds.getAttr(color_space_attr)
+            except ValueError:
+                # node doesn't have colorspace attribute
+                color_space = "Raw"
+
+            # Compare with the computed file path, e.g. the one with
+            # the pattern in it, to generate some logging information
+            # about this difference
+            # Only for file nodes with `fileTextureName` attribute
+            if attribute == "fileTextureName":
+                computed_source = cmds.getAttr(
+                    "{}.computedFileTextureNamePattern".format(node)
+                )
+                if source != computed_source:
+                    self.log.debug("Detected computed file pattern difference "
+                                   "from original pattern: {0} "
+                                   "({1} -> {2})".format(node,
+                                                         source,
+                                                         computed_source))
+
+            # Renderman allows nodes to have an empty filename attribute
+            # while the file comes in through a connection from a
+            # different node.
+            if not source and cmds.nodeType(node) in PXR_NODES:
+                self.log.debug("Renderman: source is empty, skipping...")
+                continue
+            # We replace backslashes with forward slashes because V-Ray
+            # can't handle UDIM files with backslashes in the paths as
+            # the computed patterns
+            source = source.replace("\\", "/")
+
+            files = get_file_node_files(node)
+            if len(files) == 0:
+                self.log.debug("No valid files found from node `%s`" % node)
+
+            self.log.debug("collection of resource done:")
+            self.log.debug("  - node: {}".format(node))
+            self.log.debug("  - attribute: {}".format(attribute))
+            self.log.debug("  - source: {}".format(source))
+            self.log.debug("  - file: {}".format(files))
+            self.log.debug("  - color space: {}".format(color_space))
+
+            # Define the resource
+            yield {
+                "node": node,
+                # here we are passing not only the attribute but the node
+                # again; this should be simplified and changed in the
+                # extractor.
+                "attribute": "{}.{}".format(node, attribute),
+                "source": source,  # required for resources
+                "files": files,
+                "color_space": color_space
+            }
+
+
+class CollectModelRenderSets(CollectLook):
+    """Collect render attribute sets for model instance.
+
+    Collects additional render attribute sets so they can be
+    published with model.
+
+    """
+
+    order = pyblish.api.CollectorOrder + 0.21
+    families = ["model"]
+    label = "Collect Model Render Sets"
+    hosts = ["maya"]
+
+    def collect_sets(self, instance):
+        """Collect all related objectSets except shadingEngines
+
+        Args:
+            instance (pyblish.api.Instance): publish instance containing all
+                nodes to be published.
+
+        Returns:
+            dict
+        """
+
+        sets = {}
+        for node in instance:
+            related_sets = lib.get_related_sets(node)
+            if not related_sets:
+                continue
+
+            for objset in related_sets:
+                if objset in sets:
+                    continue
+
+                if cmds.objectType(objset, isAType="shadingEngine"):
+                    continue
+
+                sets[objset] = {"uuid": lib.get_id(objset), "members": list()}
+
+        return sets
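+
+
+if __name__ == "__main__":
+    # Editor's sketch (not part of the original diff): seq_to_glob() handles
+    # both token and numeric sequence styles. Running this file standalone
+    # still requires a Maya Python environment because of the imports above.
+    print(seq_to_glob("/path/to/texture.<UDIM>.tif"))  # /path/to/texture.*.tif
+    print(seq_to_glob("/path/to/file.1001.exr"))       # /path/to/file.*.exr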
diff --git a/client/ayon_maya/plugins/publish/collect_maya_scene_time.py b/client/ayon_maya/plugins/publish/collect_maya_scene_time.py
new file mode 100644
index 00000000..6a20cb15
--- /dev/null
+++ b/client/ayon_maya/plugins/publish/collect_maya_scene_time.py
@@ -0,0 +1,28 @@
+from maya import cmds
+
+import pyblish.api
+
+
+class CollectMayaSceneTime(pyblish.api.InstancePlugin):
+    """Collect Maya scene playback range
+
+    This allows reproducing the playback range for the content to be loaded.
+    It does *not* limit the extracted data to only data inside that time range.
+ + """ + + order = pyblish.api.CollectorOrder + 0.2 + label = 'Collect Maya Scene Time' + families = ["mayaScene"] + + def process(self, instance): + instance.data.update({ + "frameStart": int( + cmds.playbackOptions(query=True, minTime=True)), + "frameEnd": int( + cmds.playbackOptions(query=True, maxTime=True)), + "frameStartHandle": int( + cmds.playbackOptions(query=True, animationStartTime=True)), + "frameEndHandle": int( + cmds.playbackOptions(query=True, animationEndTime=True)) + }) diff --git a/client/ayon_maya/plugins/publish/collect_maya_units.py b/client/ayon_maya/plugins/publish/collect_maya_units.py new file mode 100644 index 00000000..2421641d --- /dev/null +++ b/client/ayon_maya/plugins/publish/collect_maya_units.py @@ -0,0 +1,30 @@ +import maya.cmds as cmds +import maya.mel as mel + +import pyblish.api + + +class CollectMayaUnits(pyblish.api.ContextPlugin): + """Collect Maya's scene units.""" + + label = "Maya Units" + order = pyblish.api.CollectorOrder + hosts = ["maya"] + + def process(self, context): + + # Get the current linear units + units = cmds.currentUnit(query=True, linear=True) + + # Get the current angular units ('deg' or 'rad') + units_angle = cmds.currentUnit(query=True, angle=True) + + # Get the current time units + # Using the mel command is simpler than using + # `cmds.currentUnit(q=1, time=1)`. Otherwise we + # have to parse the returned string value to FPS + fps = mel.eval('currentTimeUnitToFPS()') + + context.data['linearUnits'] = units + context.data['angularUnits'] = units_angle + context.data['fps'] = fps diff --git a/client/ayon_maya/plugins/publish/collect_maya_workspace.py b/client/ayon_maya/plugins/publish/collect_maya_workspace.py new file mode 100644 index 00000000..122fabe8 --- /dev/null +++ b/client/ayon_maya/plugins/publish/collect_maya_workspace.py @@ -0,0 +1,26 @@ +import os + +import pyblish.api + +from maya import cmds + + +class CollectMayaWorkspace(pyblish.api.ContextPlugin): + """Inject the current workspace into context""" + + order = pyblish.api.CollectorOrder - 0.5 + label = "Maya Workspace" + + hosts = ['maya'] + + def process(self, context): + workspace = cmds.workspace(rootDirectory=True, query=True) + if not workspace: + # Project has not been set. Files will + # instead end up next to the working file. + workspace = cmds.workspace(dir=True, query=True) + + # Maya returns forward-slashes by default + normalised = os.path.normpath(workspace) + + context.set_data('workspaceDir', value=normalised) diff --git a/client/ayon_maya/plugins/publish/collect_model.py b/client/ayon_maya/plugins/publish/collect_model.py new file mode 100644 index 00000000..9d45ed63 --- /dev/null +++ b/client/ayon_maya/plugins/publish/collect_model.py @@ -0,0 +1,29 @@ +from maya import cmds + +import pyblish.api + + +class CollectModelData(pyblish.api.InstancePlugin): + """Collect model data + + Ensures always only a single frame is extracted (current frame). + + Todo: + Validate if is this plugin still useful. + + Note: + This is a workaround so that the `model` product type can use the + same pointcache extractor implementation as animation and pointcaches. + This always enforces the "current" frame to be published. 
+ + """ + + order = pyblish.api.CollectorOrder + 0.2 + label = 'Collect Model Data' + families = ["model"] + + def process(self, instance): + # Extract only current frame (override) + frame = cmds.currentTime(query=True) + instance.data["frameStart"] = frame + instance.data["frameEnd"] = frame diff --git a/client/ayon_maya/plugins/publish/collect_multiverse_look.py b/client/ayon_maya/plugins/publish/collect_multiverse_look.py new file mode 100644 index 00000000..9910936e --- /dev/null +++ b/client/ayon_maya/plugins/publish/collect_multiverse_look.py @@ -0,0 +1,421 @@ +import glob +import os +import re + +from maya import cmds +import pyblish.api +from ayon_maya.api import lib + +SHAPE_ATTRS = ["castsShadows", + "receiveShadows", + "motionBlur", + "primaryVisibility", + "smoothShading", + "visibleInReflections", + "visibleInRefractions", + "doubleSided", + "opposite"] + +SHAPE_ATTRS = set(SHAPE_ATTRS) +COLOUR_SPACES = ['sRGB', 'linear', 'auto'] +MIPMAP_EXTENSIONS = ['tdl'] + + +class _NodeTypeAttrib(object): + """docstring for _NodeType""" + + def __init__(self, name, fname, computed_fname=None, colour_space=None): + self.name = name + self.fname = fname + self.computed_fname = computed_fname or fname + self.colour_space = colour_space or "colorSpace" + + def get_fname(self, node): + return "{}.{}".format(node, self.fname) + + def get_computed_fname(self, node): + return "{}.{}".format(node, self.computed_fname) + + def get_colour_space(self, node): + return "{}.{}".format(node, self.colour_space) + + def __str__(self): + return ( + "_NodeTypeAttrib(name={}, fname={}, " + "computed_fname={}, colour_space={})".format( + self.name, self.fname, self.computed_fname, self.colour_space) + ) + + +NODETYPES = { + "file": [_NodeTypeAttrib("file", "fileTextureName", + "computedFileTextureNamePattern")], + "aiImage": [_NodeTypeAttrib("aiImage", "filename")], + "RedshiftNormalMap": [_NodeTypeAttrib("RedshiftNormalMap", "tex0")], + "dlTexture": [_NodeTypeAttrib("dlTexture", "textureFile", + None, "textureFile_meta_colorspace")], + "dlTriplanar": [_NodeTypeAttrib("dlTriplanar", "colorTexture", + None, "colorTexture_meta_colorspace"), + _NodeTypeAttrib("dlTriplanar", "floatTexture", + None, "floatTexture_meta_colorspace"), + _NodeTypeAttrib("dlTriplanar", "heightTexture", + None, "heightTexture_meta_colorspace")] +} + + +def get_file_paths_for_node(node): + """Gets all the file paths in this node. + + Returns all filepaths that this node references. Some node types only + reference one, but others, like dlTriplanar, can reference 3. + + Args: + node (str): Name of the Maya node + + Returns + list(str): A list with all evaluated maya attributes for filepaths. + """ + + node_type = cmds.nodeType(node) + if node_type not in NODETYPES: + return [] + + paths = [] + for node_type_attr in NODETYPES[node_type]: + fname = cmds.getAttr("{}.{}".format(node, node_type_attr.fname)) + paths.append(fname) + return paths + + +def node_uses_image_sequence(node): + """Return whether file node uses an image sequence or single image. + + Determine if a node uses an image sequence or just a single image, + not always obvious from its file path alone. + + Args: + node (str): Name of the Maya node + + Returns: + bool: True if node uses an image sequence + + """ + + # useFrameExtension indicates an explicit image sequence + paths = get_file_node_paths(node) + paths = [path.lower() for path in paths] + + # The following tokens imply a sequence + patterns = ["", "", "", "u_v", ".tif will return as /path/to/texture.*.tif. 
+
+    Args:
+        path (str): the image sequence path
+
+    Returns:
+        str: Return glob string that matches the filename pattern.
+
+    """
+
+    if path is None:
+        return path
+
+    # If the path contains any of these patterns, convert the pattern to
+    # its regex counterpart
+    patterns = {
+        "<udim>": "<udim>",
+        "<tile>": "<tile>",
+        "<uvtile>": "<uvtile>",
+        "#": "#",
+        "u<u>_v<v>": "<u>|<v>",
+        "<frame0": "<frame0\\d+>",  # noqa - copied from collect_look.py
+        "<f>": "<f>"
+    }
+
+    lower = path.lower()
+    has_pattern = False
+    for pattern, regex_pattern in patterns.items():
+        if pattern in lower:
+            path = re.sub(regex_pattern, "*", path, flags=re.IGNORECASE)
+            has_pattern = True
+
+    if has_pattern:
+        return path
+
+    base = os.path.basename(path)
+    matches = list(re.finditer(r'\d+', base))
+    if matches:
+        match = matches[-1]
+        new_base = '{0}*{1}'.format(base[:match.start()],
+                                    base[match.end():])
+        head = os.path.dirname(path)
+        return os.path.join(head, new_base)
+    else:
+        return path
+
+
+def get_file_node_paths(node):
+    """Get the file paths used by a Maya file node.
+
+    Args:
+        node (str): Name of the Maya file node
+
+    Returns:
+        list: the file paths in use
+
+    """
+    # if the path appears to be sequence, use computedFileTextureNamePattern,
+    # this preserves the <udim> or <tile> style tags
+    if cmds.attributeQuery('computedFileTextureNamePattern',
+                           node=node,
+                           exists=True):
+        plug = '{0}.computedFileTextureNamePattern'.format(node)
+        texture_pattern = cmds.getAttr(plug)
+
+        patterns = ["<udim>",
+                    "<tile>",
+                    "u<u>_v<v>",
+                    "<uvtile>",
+                    "<f>"]
+        lower = texture_pattern.lower()
+        if any(pattern in lower for pattern in patterns):
+            return [texture_pattern]
+
+    return get_file_paths_for_node(node)
+
+
+def get_file_node_files(node):
+    """Return the file paths related to the file node
+
+    Note:
+        Will only return existing files. Returns an empty list
+        if no valid existing files are linked.
+
+    Returns:
+        list: List of full file paths.
+
+    """
+
+    paths = get_file_node_paths(node)
+    paths = [cmds.workspace(expandName=path) for path in paths]
+    if node_uses_image_sequence(node):
+        globs = []
+        for path in paths:
+            globs += glob.glob(seq_to_glob(path))
+        return globs
+    else:
+        return list(filter(lambda x: os.path.exists(x), paths))
+
+
+def get_mipmap(fname):
+    for colour_space in COLOUR_SPACES:
+        for mipmap_ext in MIPMAP_EXTENSIONS:
+            mipmap_fname = '.'.join([fname, colour_space, mipmap_ext])
+            if os.path.exists(mipmap_fname):
+                return mipmap_fname
+    return None
+
+
+def is_mipmap(fname):
+    ext = os.path.splitext(fname)[1][1:]
+    return ext in MIPMAP_EXTENSIONS
+
+
+class CollectMultiverseLookData(pyblish.api.InstancePlugin):
+    """Collect Multiverse Look
+
+    Searches through the overrides finding all material overrides. From there
+    it extracts the shading group and then finds all texture files in the
+    shading group network. It also checks for mipmap versions of texture
+    files and adds them to the resources to get published.
+
+    """
+
+    order = pyblish.api.CollectorOrder + 0.2
+    label = 'Collect Multiverse Look'
+    families = ["mvLook"]
+
+    def process(self, instance):
+        # Load plugin first
+        cmds.loadPlugin("MultiverseForMaya", quiet=True)
+        import multiverse
+
+        self.log.debug("Processing mvLook for '{}'".format(instance))
+
+        nodes = set()
+        for node in instance:
+            # We want only mvUsdCompoundShape nodes.
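+            # `dag=True` walks the full hierarchy below `node`, the `type`
+            # filter keeps only Multiverse compound shapes,
+            # `noIntermediate=True` skips construction-history shapes and
+            # `long=True` returns full DAG paths to avoid name clashes.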
+            nodes_of_interest = cmds.ls(node,
+                                        dag=True,
+                                        shapes=False,
+                                        type="mvUsdCompoundShape",
+                                        noIntermediate=True,
+                                        long=True)
+            nodes.update(nodes_of_interest)
+
+        sets = {}
+        instance.data["resources"] = []
+        publishMipMap = instance.data["publishMipMap"]
+
+        for node in nodes:
+            self.log.debug("Getting resources for '{}'".format(node))
+
+            # We know what nodes need to be collected, now we need to
+            # extract the material overrides.
+            overrides = multiverse.ListMaterialOverridePrims(node)
+            for override in overrides:
+                matOver = multiverse.GetMaterialOverride(node, override)
+
+                if isinstance(matOver, multiverse.MaterialSourceShadingGroup):
+                    # We now need to grab the shadingGroup so add it to the
+                    # sets we pass down the pipe.
+                    shadingGroup = matOver.shadingGroupName
+                    self.log.debug("ShadingGroup = '{}'".format(shadingGroup))
+                    sets[shadingGroup] = {"uuid": lib.get_id(
+                        shadingGroup), "members": list()}
+
+                    # The SG may reference files, add those too!
+                    history = cmds.listHistory(
+                        shadingGroup, allConnections=True)
+
+                    # We need to iterate over node_types since `cmds.ls` may
+                    # error out if we don't have the appropriate plugin loaded.
+                    files = []
+                    for node_type in NODETYPES.keys():
+                        files += cmds.ls(history,
+                                         type=node_type,
+                                         long=True)
+
+                    for f in files:
+                        resources = self.collect_resource(f, publishMipMap)
+                        instance.data["resources"] += resources
+
+                elif isinstance(matOver, multiverse.MaterialSourceUsdPath):
+                    # TODO: Handle this later.
+                    pass
+
+        # Store data on the instance for validators, extractors, etc.
+        instance.data["lookData"] = {
+            "attributes": [],
+            "relationships": sets
+        }
+
+    def collect_resource(self, node, publishMipMap):
+        """Collect the link to the file(s) used (resource)
+
+        Args:
+            node (str): name of the node
+            publishMipMap (bool): whether to include mipmap sidecar files
+
+        Returns:
+            list: Resource dictionaries for the node's file attributes.
+        """
+
+        node_type = cmds.nodeType(node)
+        self.log.debug("processing: {}/{}".format(node, node_type))
+
+        if node_type not in NODETYPES:
+            self.log.error("Unsupported file node: {}".format(node_type))
+            raise AssertionError("Unsupported file node")
+
+        resources = []
+        for node_type_attr in NODETYPES[node_type]:
+            fname_attrib = node_type_attr.get_fname(node)
+            computed_fname_attrib = node_type_attr.get_computed_fname(node)
+            colour_space_attrib = node_type_attr.get_colour_space(node)
+
+            source = cmds.getAttr(fname_attrib)
+            color_space = "Raw"
+            try:
+                color_space = cmds.getAttr(colour_space_attrib)
+            except ValueError:
+                # node doesn't have colorspace attribute, use "Raw" from before
+                pass
+            # Compare with the computed file path, e.g. the one with the
+            # pattern in it, to generate some logging information about this
+            # difference
+            # computed_attribute = "{}.computedFileTextureNamePattern".format(node)  # noqa
+            computed_source = cmds.getAttr(computed_fname_attrib)
+            if source != computed_source:
+                self.log.debug("Detected computed file pattern difference "
+                               "from original pattern: {0} "
+                               "({1} -> {2})".format(node,
+                                                     source,
+                                                     computed_source))
+
+            # We replace backslashes with forward slashes because V-Ray
+            # can't handle UDIM file paths that contain backslashes in
+            # the computed patterns
+            source = source.replace("\\", "/")
+
+            files = get_file_node_files(node)
+            files = self.handle_files(files, publishMipMap)
+            if len(files) == 0:
+                self.log.error("No valid files found from node `%s`" % node)
+
+            self.log.debug("Collected resource:")
+            self.log.debug(" - node: {}".format(node))
+            self.log.debug(" - attribute: {}".format(fname_attrib))
+            self.log.debug(" - source: {}".format(source))
+            self.log.debug(" - file: {}".format(files))
+            self.log.debug(" - color space: {}".format(color_space))
+
+            # Define the resource
+            resource = {"node": node,
+                        "attribute": fname_attrib,
+                        "source": source,  # required for resources
+                        "files": files,
+                        "color_space": color_space}  # required for resources
+            resources.append(resource)
+        return resources
+
+    def handle_files(self, files, publishMipMap):
+        """Ensure all files are either already mipmapped or have a
+        corresponding mipmap sidecar, and add those sidecars to the list."""
+        if not publishMipMap:
+            return files
+
+        extra_files = []
+        self.log.debug("Expecting MipMaps, going to look for them.")
+        for fname in files:
+            self.log.debug("Checking '{}' for mipmaps".format(fname))
+            if is_mipmap(fname):
+                self.log.debug(" - file is already MipMap, skipping.")
+                continue
+
+            mipmap = get_mipmap(fname)
+            if mipmap:
+                self.log.debug(" mipmap found for '{}'".format(fname))
+                extra_files.append(mipmap)
+            else:
+                self.log.warning(" no mipmap found for '{}'".format(fname))
+        return files + extra_files
diff --git a/client/ayon_maya/plugins/publish/collect_pointcache.py b/client/ayon_maya/plugins/publish/collect_pointcache.py
new file mode 100644
index 00000000..5578a57f
--- /dev/null
+++ b/client/ayon_maya/plugins/publish/collect_pointcache.py
@@ -0,0 +1,47 @@
+from maya import cmds
+
+import pyblish.api
+
+
+class CollectPointcache(pyblish.api.InstancePlugin):
+    """Collect pointcache data for instance."""
+
+    order = pyblish.api.CollectorOrder + 0.4
+    families = ["pointcache"]
+    label = "Collect Pointcache"
+    hosts = ["maya"]
+
+    def process(self, instance):
+        if instance.data.get("farm"):
+            instance.data["families"].append("publish.farm")
+
+        proxy_set = None
+        for node in cmds.ls(instance.data["setMembers"],
+                            exactType="objectSet"):
+            # Find proxy_SET objectSet in the instance for proxy meshes
+            if node.endswith("proxy_SET"):
+                members = cmds.sets(node, query=True)
+                if members is None:
+                    self.log.debug("Skipped empty proxy_SET: \"%s\" " % node)
+                    continue
+                self.log.debug("Found proxy set: {}".format(node))
+
+                proxy_set = node
+                instance.data["proxy"] = []
+                instance.data["proxyRoots"] = []
+                for member in members:
+                    instance.data["proxy"].extend(cmds.ls(member, long=True))
+                    instance.data["proxyRoots"].extend(
+                        cmds.ls(member, long=True)
+                    )
+                    instance.data["proxy"].extend(
+                        cmds.listRelatives(member, shapes=True, fullPath=True)
+                    )
+                self.log.debug(
+                    "Found proxy members: {}".format(instance.data["proxy"])
+                )
+                break
+
+        if
proxy_set: + instance.remove(proxy_set) + instance.data["setMembers"].remove(proxy_set) diff --git a/client/ayon_maya/plugins/publish/collect_remove_marked.py b/client/ayon_maya/plugins/publish/collect_remove_marked.py new file mode 100644 index 00000000..69e69f66 --- /dev/null +++ b/client/ayon_maya/plugins/publish/collect_remove_marked.py @@ -0,0 +1,24 @@ +import pyblish.api + + +class CollectRemoveMarked(pyblish.api.ContextPlugin): + """Remove marked data + + Remove instances that have 'remove' in their instance.data + + """ + + order = pyblish.api.CollectorOrder + 0.499 + label = 'Remove Marked Instances' + + def process(self, context): + + self.log.debug(context) + # make ftrack publishable + instances_to_remove = [] + for instance in context: + if instance.data.get('remove'): + instances_to_remove.append(instance) + + for instance in instances_to_remove: + context.remove(instance) diff --git a/client/ayon_maya/plugins/publish/collect_render.py b/client/ayon_maya/plugins/publish/collect_render.py new file mode 100644 index 00000000..cacde155 --- /dev/null +++ b/client/ayon_maya/plugins/publish/collect_render.py @@ -0,0 +1,332 @@ +# -*- coding: utf-8 -*- +"""Collect render data. + +This collector will go through renderlayer instances and prepare all data +needed to detect the expected rendered files for a layer, with resolution, +frame ranges and collects the data needed for publishing on the farm. + +Requires: + instance -> families + + context -> currentFile + context -> user + +Provides: + instance -> label + instance -> subset + instance -> attachTo + instance -> setMembers + instance -> publish + instance -> frameStart + instance -> frameEnd + instance -> byFrameStep + instance -> renderer + instance -> family + instance -> asset + instance -> time + instance -> author + instance -> source + instance -> expectedFiles + instance -> resolutionWidth + instance -> resolutionHeight + instance -> pixelAspect +""" + +import os +import platform +import json + +from maya import cmds + +import pyblish.api + +from ayon_core.pipeline import KnownPublishError +from ayon_core.lib import get_formatted_current_time +from ayon_maya.api.lib_renderproducts import ( + get as get_layer_render_products, + UnsupportedRendererException +) +from ayon_maya.api import lib + + +class CollectMayaRender(pyblish.api.InstancePlugin): + """Gather all publishable render layers from renderSetup.""" + + order = pyblish.api.CollectorOrder + 0.01 + hosts = ["maya"] + families = ["renderlayer"] + label = "Collect Render Layers" + sync_workfile_version = False + + _aov_chars = { + "dot": ".", + "dash": "-", + "underscore": "_" + } + + def process(self, instance): + + # TODO: Re-add force enable of workfile instance? 
+ # TODO: Re-add legacy layer support with LAYER_ prefix but in Creator + context = instance.context + + layer = instance.data["transientData"]["layer"] + objset = instance.data.get("instance_node") + filepath = context.data["currentFile"].replace("\\", "/") + + # check if layer is renderable + if not layer.isRenderable(): + msg = "Render layer [ {} ] is not " "renderable".format( + layer.name() + ) + self.log.warning(msg) + + # detect if there are sets (products) to attach render to + sets = cmds.sets(objset, query=True) or [] + attach_to = [] + for s in sets: + if not cmds.attributeQuery("productType", node=s, exists=True): + continue + + attach_to.append( + { + "version": None, # we need integrator for that + "productName": s, + "productType": cmds.getAttr("{}.productType".format(s)), + } + ) + self.log.debug(" -> attach render to: {}".format(s)) + + layer_name = layer.name() + + # collect all frames we are expecting to be rendered + # return all expected files for all cameras and aovs in given + # frame range + try: + layer_render_products = get_layer_render_products(layer.name()) + except UnsupportedRendererException as exc: + raise KnownPublishError(exc) + render_products = layer_render_products.layer_data.products + if not render_products: + self.log.error( + "No render products generated for '%s'. You might not have " + "any render camera in the renderlayer or render end frame is " + "lower than start frame.", + instance.name + ) + expected_files = [] + multipart = False + for product in render_products: + if product.multipart: + multipart = True + product_name = product.productName + if product.camera and layer_render_products.has_camera_token(): + product_name = "{}{}".format( + product.camera, + "_{}".format(product_name) if product_name else "") + expected_files.append( + { + product_name: layer_render_products.get_files( + product) + }) + + has_cameras = any(product.camera for product in render_products) + if render_products and not has_cameras: + self.log.error( + "No render cameras found for: %s", + instance + ) + if not expected_files: + self.log.warning( + "No file names were generated, this is a bug.") + + for render_product in render_products: + self.log.debug(render_product) + self.log.debug("multipart: {}".format(multipart)) + self.log.debug("expected files: {}".format( + json.dumps(expected_files, indent=4, sort_keys=True) + )) + + # if we want to attach render to product, check if we have AOV's + # in expectedFiles. If so, raise error as we cannot attach AOV + # (considered to be product on its own) to another product + if attach_to: + assert isinstance(expected_files, list), ( + "attaching multiple AOVs or renderable cameras to " + "product is not supported" + ) + + # append full path + image_directory = os.path.join( + cmds.workspace(query=True, rootDirectory=True), + cmds.workspace(fileRuleEntry="images") + ) + # replace relative paths with absolute. Render products are + # returned as list of dictionaries. 
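+        # Each entry of `expectedFiles` maps an AOV / product name to its
+        # list of file paths, e.g. (hypothetical paths):
+        #     [{"beauty": ["renders/sc010/beauty.1001.exr", ...]}]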
+ publish_meta_path = "NOT-SET" + aov_dict = {} + for aov in expected_files: + full_paths = [] + aov_first_key = list(aov.keys())[0] + for file in aov[aov_first_key]: + full_path = os.path.join(image_directory, file) + full_path = full_path.replace("\\", "/") + full_paths.append(full_path) + publish_meta_path = os.path.dirname(full_path) + aov_dict[aov_first_key] = full_paths + full_exp_files = [aov_dict] + + frame_start_render = int(self.get_render_attribute( + "startFrame", layer=layer_name)) + frame_end_render = int(self.get_render_attribute( + "endFrame", layer=layer_name)) + + if (int(context.data["frameStartHandle"]) == frame_start_render + and int(context.data["frameEndHandle"]) == frame_end_render): # noqa: W503, E501 + + handle_start = context.data["handleStart"] + handle_end = context.data["handleEnd"] + frame_start = context.data["frameStart"] + frame_end = context.data["frameEnd"] + frame_start_handle = context.data["frameStartHandle"] + frame_end_handle = context.data["frameEndHandle"] + else: + handle_start = 0 + handle_end = 0 + frame_start = frame_start_render + frame_end = frame_end_render + frame_start_handle = frame_start_render + frame_end_handle = frame_end_render + + # find common path to store metadata + # so if image prefix is branching to many directories + # metadata file will be located in top-most common + # directory. + # TODO: use `os.path.commonpath()` after switch to Python 3 + publish_meta_path = os.path.normpath(publish_meta_path) + common_publish_meta_path = os.path.splitdrive( + publish_meta_path)[0] + if common_publish_meta_path: + common_publish_meta_path += os.path.sep + for part in publish_meta_path.replace( + common_publish_meta_path, "").split(os.path.sep): + common_publish_meta_path = os.path.join( + common_publish_meta_path, part) + if part == layer_name: + break + + # TODO: replace this terrible linux hotfix with real solution :) + if platform.system().lower() in ["linux", "darwin"]: + common_publish_meta_path = "/" + common_publish_meta_path + + self.log.debug( + "Publish meta path: {}".format(common_publish_meta_path) + ) + + # Get layer specific settings, might be overrides + colorspace_data = lib.get_color_management_preferences() + data = { + "farm": True, + "attachTo": attach_to, + + "multipartExr": multipart, + "review": instance.data.get("review") or False, + + # Frame range + "handleStart": handle_start, + "handleEnd": handle_end, + "frameStart": frame_start, + "frameEnd": frame_end, + "frameStartHandle": frame_start_handle, + "frameEndHandle": frame_end_handle, + "byFrameStep": int( + self.get_render_attribute("byFrameStep", + layer=layer_name)), + + # Renderlayer + "renderer": self.get_render_attribute( + "currentRenderer", layer=layer_name).lower(), + "setMembers": layer._getLegacyNodeName(), # legacy renderlayer + "renderlayer": layer_name, + + # todo: is `time` and `author` still needed? 
+ "time": get_formatted_current_time(), + "author": context.data["user"], + + # Add source to allow tracing back to the scene from + # which was submitted originally + "source": filepath, + "expectedFiles": full_exp_files, + "publishRenderMetadataFolder": common_publish_meta_path, + "renderProducts": layer_render_products, + "resolutionWidth": lib.get_attr_in_layer( + "defaultResolution.width", layer=layer_name + ), + "resolutionHeight": lib.get_attr_in_layer( + "defaultResolution.height", layer=layer_name + ), + "pixelAspect": lib.get_attr_in_layer( + "defaultResolution.pixelAspect", layer=layer_name + ), + + # todo: Following are likely not needed due to collecting from the + # instance itself if they are attribute definitions + "tileRendering": instance.data.get("tileRendering") or False, # noqa: E501 + "tilesX": instance.data.get("tilesX") or 2, + "tilesY": instance.data.get("tilesY") or 2, + "convertToScanline": instance.data.get( + "convertToScanline") or False, + "useReferencedAovs": instance.data.get( + "useReferencedAovs") or instance.data.get( + "vrayUseReferencedAovs") or False, + "aovSeparator": layer_render_products.layer_data.aov_separator, # noqa: E501 + "renderSetupIncludeLights": instance.data.get( + "renderSetupIncludeLights" + ), + "colorspaceConfig": colorspace_data["config"], + "colorspaceDisplay": colorspace_data["display"], + "colorspaceView": colorspace_data["view"], + } + + manager = context.data["ayonAddonsManager"] + if manager.get_enabled_addon("royalrender") is not None: + data["rrPathName"] = instance.data.get("rrPathName") + self.log.debug(data["rrPathName"]) + + if self.sync_workfile_version: + data["version"] = context.data["version"] + for _instance in context: + if _instance.data["productType"] == "workfile": + _instance.data["version"] = context.data["version"] + + # Define nice label + label = "{0} ({1})".format(layer_name, instance.data["folderPath"]) + label += " [{0}-{1}]".format( + int(data["frameStartHandle"]), int(data["frameEndHandle"]) + ) + data["label"] = label + + # Override frames should be False if extendFrames is False. This is + # to ensure it doesn't go off doing crazy unpredictable things + extend_frames = instance.data.get("extendFrames", False) + if not extend_frames: + instance.data["overrideExistingFrame"] = False + + # Update the instance + instance.data.update(data) + + @staticmethod + def get_render_attribute(attr, layer): + """Get attribute from render options. + + Args: + attr (str): name of attribute to be looked up + layer (str): name of render layer + + Returns: + Attribute value + + """ + return lib.get_attr_in_layer( + "defaultRenderGlobals.{}".format(attr), layer=layer + ) diff --git a/client/ayon_maya/plugins/publish/collect_render_layer_aovs.py b/client/ayon_maya/plugins/publish/collect_render_layer_aovs.py new file mode 100644 index 00000000..d23a4edb --- /dev/null +++ b/client/ayon_maya/plugins/publish/collect_render_layer_aovs.py @@ -0,0 +1,97 @@ +from maya import cmds + +import pyblish.api + +from ayon_maya.api import lib + + +class CollectRenderLayerAOVS(pyblish.api.InstancePlugin): + """Collect all render layer's AOVs / Render Elements that will render. + + This collector is important to be able to Extend Frames. + + Technical information: + Each renderer uses different logic to work with render passes. 
+ VRay - RenderElement + Simple node connection to the actual renderLayer node + + Arnold - AOV: + Uses its own render settings node and connects an aiOAV to it + + Redshift - AOV: + Uses its own render settings node and RedshiftAOV node. It is not + connected but all AOVs are enabled for all render layers by default. + + """ + + order = pyblish.api.CollectorOrder + 0.02 + label = "Render Elements / AOVs" + hosts = ["maya"] + families = ["renderlayer"] + + def process(self, instance): + + # Check if Extend Frames is toggled + if not instance.data("extendFrames", False): + return + + # Get renderer + renderer = instance.data["renderer"] + self.log.debug("Renderer found: {}".format(renderer)) + + rp_node_types = {"vray": ["VRayRenderElement", "VRayRenderElementSet"], + "arnold": ["aiAOV"], + "redshift": ["RedshiftAOV"]} + + if renderer not in rp_node_types.keys(): + self.log.error("Unsupported renderer found: '{}'".format(renderer)) + return + + result = [] + + # Collect all AOVs / Render Elements + layer = instance.data["renderlayer"] + node_type = rp_node_types[renderer] + render_elements = cmds.ls(type=node_type) + + # Check if AOVs / Render Elements are enabled + for element in render_elements: + enabled = lib.get_attr_in_layer("{}.enabled".format(element), + layer=layer) + if not enabled: + continue + + pass_name = self.get_pass_name(renderer, element) + render_pass = "%s.%s" % (instance.data["productName"], pass_name) + + result.append(render_pass) + + self.log.debug("Found {} render elements / AOVs for " + "'{}'".format(len(result), instance.data["productName"])) + + instance.data["renderPasses"] = result + + def get_pass_name(self, renderer, node): + + if renderer == "vray": + + # Get render element pass type + vray_node_attr = next(attr for attr in cmds.listAttr(node) + if attr.startswith("vray_name")) + pass_type = vray_node_attr.rsplit("_", 1)[-1] + + # Support V-Ray extratex explicit name (if set by user) + if pass_type == "extratex": + explicit_attr = "{}.vray_explicit_name_extratex".format(node) + explicit_name = cmds.getAttr(explicit_attr) + if explicit_name: + return explicit_name + + # Node type is in the attribute name but we need to check if value + # of the attribute as it can be changed + return cmds.getAttr("{}.{}".format(node, vray_node_attr)) + + elif renderer in ["arnold", "redshift"]: + return cmds.getAttr("{}.name".format(node)) + else: + raise RuntimeError("Unsupported renderer: '{}'".format(renderer)) diff --git a/client/ayon_maya/plugins/publish/collect_renderable_camera.py b/client/ayon_maya/plugins/publish/collect_renderable_camera.py new file mode 100644 index 00000000..d2d05971 --- /dev/null +++ b/client/ayon_maya/plugins/publish/collect_renderable_camera.py @@ -0,0 +1,32 @@ +import pyblish.api + +from maya import cmds + +from ayon_maya.api.lib_rendersetup import get_attr_in_layer + + +class CollectRenderableCamera(pyblish.api.InstancePlugin): + """Collect the renderable camera(s) for the render layer""" + + # Offset to be after renderlayer collection. 
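+    # (CollectMayaRender runs at CollectorOrder + 0.01, so the +0.02 offset
+    # guarantees `instance.data["renderlayer"]` has already been collected.)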
+    order = pyblish.api.CollectorOrder + 0.02
+    label = "Collect Renderable Camera(s)"
+    hosts = ["maya"]
+    families = ["vrayscene_layer",
+                "renderlayer"]
+
+    def process(self, instance):
+        if "vrayscene_layer" in instance.data.get("families", []):
+            layer = instance.data.get("layer")
+        else:
+            layer = instance.data["renderlayer"]
+
+        cameras = cmds.ls(type="camera", long=True)
+        renderable = [cam for cam in cameras if
+                      get_attr_in_layer("{}.renderable".format(cam), layer)]
+
+        self.log.debug(
+            "Found %s renderable camera(s): %s", len(renderable), renderable
+        )
+
+        instance.data["cameras"] = renderable
diff --git a/client/ayon_maya/plugins/publish/collect_review.py b/client/ayon_maya/plugins/publish/collect_review.py
new file mode 100644
index 00000000..490e197c
--- /dev/null
+++ b/client/ayon_maya/plugins/publish/collect_review.py
@@ -0,0 +1,186 @@
+from maya import cmds, mel
+
+import ayon_api
+import pyblish.api
+
+from ayon_core.pipeline import KnownPublishError
+from ayon_maya.api import lib
+
+
+class CollectReview(pyblish.api.InstancePlugin):
+    """Collect Review data"""
+
+    order = pyblish.api.CollectorOrder + 0.3
+    label = 'Collect Review Data'
+    families = ["review"]
+
+    def process(self, instance):
+
+        # Get panel.
+        instance.data["panel"] = cmds.playblast(
+            activeEditor=True
+        ).rsplit("|", 1)[-1]
+
+        # get cameras
+        members = instance.data['setMembers']
+        self.log.debug('members: {}'.format(members))
+        cameras = cmds.ls(members, long=True, dag=True, cameras=True)
+        camera = cameras[0] if cameras else None
+
+        context = instance.context
+        objectset = {
+            i.data.get("instance_node") for i in context
+        }
+
+        # Collect display lights.
+        display_lights = instance.data.get("displayLights", "default")
+        if display_lights == "project_settings":
+            settings = instance.context.data["project_settings"]
+            settings = settings["maya"]["publish"]["ExtractPlayblast"]
+            settings = settings["capture_preset"]["ViewportOptions"]
+            display_lights = settings["displayLights"]
+
+        # Collect camera focal length.
+        burninDataMembers = instance.data.get("burninDataMembers", {})
+        if camera is not None:
+            attr = camera + ".focalLength"
+            if lib.get_attribute_input(attr):
+                start = instance.data["frameStart"]
+                end = instance.data["frameEnd"] + 1
+                time_range = range(int(start), int(end))
+                focal_length = [cmds.getAttr(attr, time=t) for t in time_range]
+            else:
+                focal_length = cmds.getAttr(attr)
+
+            burninDataMembers["focalLength"] = focal_length
+
+        # Account for nested instances like model.
+        reviewable_products = list(set(members) & objectset)
+        if reviewable_products:
+            if len(reviewable_products) > 1:
+                raise KnownPublishError(
+                    "Multiple attached products for review are not supported.
" + "Attached: {}".format(", ".join(reviewable_products)) + ) + + reviewable_product = reviewable_products[0] + self.log.debug( + "Product attached to review: {}".format(reviewable_product) + ) + + # Find the relevant publishing instance in the current context + reviewable_inst = next(inst for inst in context + if inst.name == reviewable_product) + data = reviewable_inst.data + + self.log.debug( + 'Adding review family to {}'.format(reviewable_product) + ) + if data.get('families'): + data['families'].append('review') + else: + data['families'] = ['review'] + + data["cameras"] = cameras + data['review_camera'] = camera + data['frameStartFtrack'] = instance.data["frameStartHandle"] + data['frameEndFtrack'] = instance.data["frameEndHandle"] + data['frameStartHandle'] = instance.data["frameStartHandle"] + data['frameEndHandle'] = instance.data["frameEndHandle"] + data['handleStart'] = instance.data["handleStart"] + data['handleEnd'] = instance.data["handleEnd"] + data["frameStart"] = instance.data["frameStart"] + data["frameEnd"] = instance.data["frameEnd"] + data['step'] = instance.data['step'] + # this (with other time related data) should be set on + # representations. Once plugins like Extract Review start + # using representations, this should be removed from here + # as Extract Playblast is already adding fps to representation. + data['fps'] = context.data['fps'] + data['review_width'] = instance.data['review_width'] + data['review_height'] = instance.data['review_height'] + data["isolate"] = instance.data["isolate"] + data["panZoom"] = instance.data.get("panZoom", False) + data["panel"] = instance.data["panel"] + data["displayLights"] = display_lights + data["burninDataMembers"] = burninDataMembers + + for key, value in instance.data["publish_attributes"].items(): + data["publish_attributes"][key] = value + + # The review instance must be active + cmds.setAttr(str(instance) + '.active', 1) + + instance.data['remove'] = True + + else: + project_name = instance.context.data["projectName"] + folder_entity = instance.context.data["folderEntity"] + task = instance.context.data["task"] + legacy_product_name = task + 'Review' + product_entity = ayon_api.get_product_by_name( + project_name, + legacy_product_name, + folder_entity["id"], + fields={"id"} + ) + if product_entity: + self.log.debug("Existing products found, keep legacy name.") + instance.data["productName"] = legacy_product_name + + instance.data["cameras"] = cameras + instance.data['review_camera'] = camera + instance.data['frameStartFtrack'] = \ + instance.data["frameStartHandle"] + instance.data['frameEndFtrack'] = \ + instance.data["frameEndHandle"] + instance.data["displayLights"] = display_lights + instance.data["burninDataMembers"] = burninDataMembers + # this (with other time related data) should be set on + # representations. Once plugins like Extract Review start + # using representations, this should be removed from here + # as Extract Playblast is already adding fps to representation. 
+ instance.data["fps"] = instance.context.data["fps"] + + # make ftrack publishable + instance.data.setdefault("families", []).append('ftrack') + + cmds.setAttr(str(instance) + '.active', 1) + + # Collect audio + playback_slider = mel.eval('$tmpVar=$gPlayBackSlider') + audio_name = cmds.timeControl(playback_slider, + query=True, + sound=True) + display_sounds = cmds.timeControl( + playback_slider, query=True, displaySound=True + ) + + def get_audio_node_data(node): + return { + "offset": cmds.getAttr("{}.offset".format(node)), + "filename": cmds.getAttr("{}.filename".format(node)) + } + + audio_data = [] + + if audio_name: + audio_data.append(get_audio_node_data(audio_name)) + + elif display_sounds: + start_frame = int(cmds.playbackOptions(query=True, min=True)) + end_frame = int(cmds.playbackOptions(query=True, max=True)) + + for node in cmds.ls(type="audio"): + # Check if frame range and audio range intersections, + # for whether to include this audio node or not. + duration = cmds.getAttr("{}.duration".format(node)) + start_audio = cmds.getAttr("{}.offset".format(node)) + end_audio = start_audio + duration + + if start_audio <= end_frame and end_audio > start_frame: + audio_data.append(get_audio_node_data(node)) + + instance.data["audio"] = audio_data diff --git a/client/ayon_maya/plugins/publish/collect_rig_sets.py b/client/ayon_maya/plugins/publish/collect_rig_sets.py new file mode 100644 index 00000000..34ff26a8 --- /dev/null +++ b/client/ayon_maya/plugins/publish/collect_rig_sets.py @@ -0,0 +1,40 @@ +import pyblish.api +from maya import cmds + + +class CollectRigSets(pyblish.api.InstancePlugin): + """Ensure rig contains pipeline-critical content + + Every rig must contain at least two object sets: + "controls_SET" - Set of all animatable controls + "out_SET" - Set of all cacheable meshes + + """ + + order = pyblish.api.CollectorOrder + 0.05 + label = "Collect Rig Sets" + hosts = ["maya"] + families = ["rig"] + + accepted_output = ["mesh", "transform"] + accepted_controllers = ["transform"] + + def process(self, instance): + + # Find required sets by suffix + searching = {"controls_SET", "out_SET", + "skeletonAnim_SET", "skeletonMesh_SET"} + found = {} + for node in cmds.ls(instance, exactType="objectSet"): + for suffix in searching: + if node.endswith(suffix): + found[suffix] = node + searching.remove(suffix) + break + if not searching: + break + + self.log.debug("Found sets: {}".format(found)) + rig_sets = instance.data.setdefault("rig_sets", {}) + for name, objset in found.items(): + rig_sets[name] = objset diff --git a/client/ayon_maya/plugins/publish/collect_skeleton_mesh.py b/client/ayon_maya/plugins/publish/collect_skeleton_mesh.py new file mode 100644 index 00000000..31f0eca8 --- /dev/null +++ b/client/ayon_maya/plugins/publish/collect_skeleton_mesh.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +from maya import cmds # noqa +import pyblish.api + + +class CollectSkeletonMesh(pyblish.api.InstancePlugin): + """Collect Static Rig Data for FBX Extractor.""" + + order = pyblish.api.CollectorOrder + 0.2 + label = "Collect Skeleton Mesh" + hosts = ["maya"] + families = ["rig"] + + def process(self, instance): + skeleton_mesh_set = instance.data["rig_sets"].get( + "skeletonMesh_SET") + if not skeleton_mesh_set: + self.log.debug( + "No skeletonMesh_SET found. " + "Skipping collecting of skeleton mesh..." 
+ ) + return + + # Store current frame to ensure single frame export + frame = cmds.currentTime(query=True) + instance.data["frameStart"] = frame + instance.data["frameEnd"] = frame + + instance.data["skeleton_mesh"] = [] + + skeleton_mesh_content = cmds.sets( + skeleton_mesh_set, query=True) or [] + if not skeleton_mesh_content: + self.log.debug( + "No object nodes in skeletonMesh_SET. " + "Skipping collecting of skeleton mesh..." + ) + return + instance.data["families"] += ["rig.fbx"] + instance.data["skeleton_mesh"] = skeleton_mesh_content + self.log.debug( + "Collected skeletonMesh_SET members: {}".format( + skeleton_mesh_content + )) diff --git a/client/ayon_maya/plugins/publish/collect_unreal_skeletalmesh.py b/client/ayon_maya/plugins/publish/collect_unreal_skeletalmesh.py new file mode 100644 index 00000000..79693bb3 --- /dev/null +++ b/client/ayon_maya/plugins/publish/collect_unreal_skeletalmesh.py @@ -0,0 +1,39 @@ +# -*- coding: utf-8 -*- +from maya import cmds # noqa +import pyblish.api + + +class CollectUnrealSkeletalMesh(pyblish.api.InstancePlugin): + """Collect Unreal Skeletal Mesh.""" + + order = pyblish.api.CollectorOrder + 0.2 + label = "Collect Unreal Skeletal Meshes" + families = ["skeletalMesh"] + + def process(self, instance): + frame = cmds.currentTime(query=True) + instance.data["frameStart"] = frame + instance.data["frameEnd"] = frame + + geo_sets = [ + i for i in instance[:] + if i.lower().startswith("geometry_set") + ] + + joint_sets = [ + i for i in instance[:] + if i.lower().startswith("joints_set") + ] + + instance.data["geometry"] = [] + instance.data["joints"] = [] + + for geo_set in geo_sets: + geo_content = cmds.ls(cmds.sets(geo_set, query=True), long=True) + if geo_content: + instance.data["geometry"] += geo_content + + for join_set in joint_sets: + join_content = cmds.ls(cmds.sets(join_set, query=True), long=True) + if join_content: + instance.data["joints"] += join_content diff --git a/client/ayon_maya/plugins/publish/collect_unreal_staticmesh.py b/client/ayon_maya/plugins/publish/collect_unreal_staticmesh.py new file mode 100644 index 00000000..03b6c4a1 --- /dev/null +++ b/client/ayon_maya/plugins/publish/collect_unreal_staticmesh.py @@ -0,0 +1,37 @@ +# -*- coding: utf-8 -*- +from maya import cmds # noqa +import pyblish.api +from pprint import pformat + + +class CollectUnrealStaticMesh(pyblish.api.InstancePlugin): + """Collect Unreal Static Mesh.""" + + order = pyblish.api.CollectorOrder + 0.2 + label = "Collect Unreal Static Meshes" + families = ["staticMesh"] + + def process(self, instance): + geometry_set = [ + i for i in instance + if i.startswith("geometry_SET") + ] + instance.data["geometryMembers"] = cmds.sets( + geometry_set, query=True) + + self.log.debug("geometry: {}".format( + pformat(instance.data.get("geometryMembers")))) + + collision_set = [ + i for i in instance + if i.startswith("collisions_SET") + ] + instance.data["collisionMembers"] = cmds.sets( + collision_set, query=True) + + self.log.debug("collisions: {}".format( + pformat(instance.data.get("collisionMembers")))) + + frame = cmds.currentTime(query=True) + instance.data["frameStart"] = frame + instance.data["frameEnd"] = frame diff --git a/client/ayon_maya/plugins/publish/collect_user_defined_attributes.py b/client/ayon_maya/plugins/publish/collect_user_defined_attributes.py new file mode 100644 index 00000000..3d586d48 --- /dev/null +++ b/client/ayon_maya/plugins/publish/collect_user_defined_attributes.py @@ -0,0 +1,41 @@ +from maya import cmds + +import pyblish.api + + +class 
CollectUserDefinedAttributes(pyblish.api.InstancePlugin): + """Collect user defined attributes for nodes in instance.""" + + order = pyblish.api.CollectorOrder + 0.45 + families = ["pointcache", "animation", "usd"] + label = "Collect User Defined Attributes" + hosts = ["maya"] + + def process(self, instance): + + # Collect user defined attributes. + if not instance.data["creator_attributes"].get( + "includeUserDefinedAttributes" + ): + return + + if "out_hierarchy" in instance.data: + # animation family + nodes = instance.data["out_hierarchy"] + else: + nodes = instance[:] + if not nodes: + return + + shapes = cmds.listRelatives(nodes, shapes=True, fullPath=True) or [] + nodes = set(nodes).union(shapes) + + attrs = cmds.listAttr(list(nodes), userDefined=True) or [] + user_defined_attributes = list(sorted(set(attrs))) + instance.data["userDefinedAttributes"] = user_defined_attributes + + self.log.debug( + "Collected user defined attributes: {}".format( + ", ".join(user_defined_attributes) + ) + ) diff --git a/client/ayon_maya/plugins/publish/collect_vrayproxy.py b/client/ayon_maya/plugins/publish/collect_vrayproxy.py new file mode 100644 index 00000000..8630f56e --- /dev/null +++ b/client/ayon_maya/plugins/publish/collect_vrayproxy.py @@ -0,0 +1,24 @@ +# -*- coding: utf-8 -*- +"""Collect Vray Proxy.""" +import pyblish.api + + +class CollectVrayProxy(pyblish.api.InstancePlugin): + """Collect Vray Proxy instance. + + Add `pointcache` family for it. + """ + order = pyblish.api.CollectorOrder + 0.01 + label = "Collect Vray Proxy" + families = ["vrayproxy"] + + def process(self, instance): + """Collector entry point.""" + if not instance.data.get("families"): + instance.data["families"] = [] + + if instance.data.get("vrmesh"): + instance.data["families"].append("vrayproxy.vrmesh") + + if instance.data.get("alembic"): + instance.data["families"].append("vrayproxy.alembic") diff --git a/client/ayon_maya/plugins/publish/collect_vrayscene.py b/client/ayon_maya/plugins/publish/collect_vrayscene.py new file mode 100644 index 00000000..ea706429 --- /dev/null +++ b/client/ayon_maya/plugins/publish/collect_vrayscene.py @@ -0,0 +1,122 @@ +# -*- coding: utf-8 -*- +"""Collect Vray Scene and prepare it for extraction and publishing.""" + +import pyblish.api + +from ayon_core.lib import get_formatted_current_time +from ayon_maya.api import lib + + +class CollectVrayScene(pyblish.api.InstancePlugin): + """Collect Vray Scene. + + If export on farm is checked, job is created to export it. 
+ """ + + order = pyblish.api.CollectorOrder + 0.01 + label = "Collect Vray Scene" + families = ["vrayscene"] + + def process(self, instance): + """Collector entry point.""" + + context = instance.context + + layer = instance.data["transientData"]["layer"] + layer_name = layer.name() + + renderer = self.get_render_attribute("currentRenderer", + layer=layer_name) + if renderer != "vray": + self.log.warning("Layer '{}' renderer is not set to V-Ray".format( + layer_name + )) + + # collect all frames we are expecting to be rendered + frame_start_render = int(self.get_render_attribute( + "startFrame", layer=layer_name)) + frame_end_render = int(self.get_render_attribute( + "endFrame", layer=layer_name)) + + if (int(context.data['frameStartHandle']) == frame_start_render + and int(context.data['frameEndHandle']) == frame_end_render): # noqa: W503, E501 + + handle_start = context.data['handleStart'] + handle_end = context.data['handleEnd'] + frame_start = context.data['frameStart'] + frame_end = context.data['frameEnd'] + frame_start_handle = context.data['frameStartHandle'] + frame_end_handle = context.data['frameEndHandle'] + else: + handle_start = 0 + handle_end = 0 + frame_start = frame_start_render + frame_end = frame_end_render + frame_start_handle = frame_start_render + frame_end_handle = frame_end_render + + # Get layer specific settings, might be overrides + product_type = "vrayscene_layer" + data = { + "productName": layer_name, + "layer": layer_name, + # TODO: This likely needs fixing now + # Before refactor: cmds.sets(layer, q=True) or ["*"] + "setMembers": ["*"], + "review": False, + "publish": True, + "handleStart": handle_start, + "handleEnd": handle_end, + "frameStart": frame_start, + "frameEnd": frame_end, + "frameStartHandle": frame_start_handle, + "frameEndHandle": frame_end_handle, + "byFrameStep": int( + self.get_render_attribute("byFrameStep", + layer=layer_name)), + "renderer": renderer, + # instance product type + "productType": product_type, + "family": product_type, + "families": [product_type], + "time": get_formatted_current_time(), + "author": context.data["user"], + # Add source to allow tracing back to the scene from + # which was submitted originally + "source": context.data["currentFile"].replace("\\", "/"), + "resolutionWidth": lib.get_attr_in_layer( + "defaultResolution.height", layer=layer_name + ), + "resolutionHeight": lib.get_attr_in_layer( + "defaultResolution.width", layer=layer_name + ), + "pixelAspect": lib.get_attr_in_layer( + "defaultResolution.pixelAspect", layer=layer_name + ), + "priority": instance.data.get("priority"), + "useMultipleSceneFiles": instance.data.get( + "vraySceneMultipleFiles") + } + + instance.data.update(data) + + # Define nice label + label = "{0} ({1})".format(layer_name, instance.data["folderPath"]) + label += " [{0}-{1}]".format( + int(data["frameStartHandle"]), int(data["frameEndHandle"]) + ) + instance.data["label"] = label + + def get_render_attribute(self, attr, layer): + """Get attribute from render options. + + Args: + attr (str): name of attribute to be looked up. 
+ + Returns: + Attribute value + + """ + return lib.get_attr_in_layer( + "defaultRenderGlobals.{}".format(attr), layer=layer + ) diff --git a/client/ayon_maya/plugins/publish/collect_workfile.py b/client/ayon_maya/plugins/publish/collect_workfile.py new file mode 100644 index 00000000..e2b64f1e --- /dev/null +++ b/client/ayon_maya/plugins/publish/collect_workfile.py @@ -0,0 +1,36 @@ +import os +import pyblish.api + + +class CollectWorkfileData(pyblish.api.InstancePlugin): + """Inject data into Workfile instance""" + + order = pyblish.api.CollectorOrder - 0.01 + label = "Maya Workfile" + hosts = ['maya'] + families = ["workfile"] + + def process(self, instance): + """Inject the current working file""" + + context = instance.context + current_file = instance.context.data['currentFile'] + folder, file = os.path.split(current_file) + filename, ext = os.path.splitext(file) + + data = { # noqa + "setMembers": [current_file], + "frameStart": context.data['frameStart'], + "frameEnd": context.data['frameEnd'], + "handleStart": context.data['handleStart'], + "handleEnd": context.data['handleEnd'] + } + + data['representations'] = [{ + 'name': ext.lstrip("."), + 'ext': ext.lstrip("."), + 'files': file, + "stagingDir": folder, + }] + + instance.data.update(data) diff --git a/client/ayon_maya/plugins/publish/collect_workscene_fps.py b/client/ayon_maya/plugins/publish/collect_workscene_fps.py new file mode 100644 index 00000000..41d6ffea --- /dev/null +++ b/client/ayon_maya/plugins/publish/collect_workscene_fps.py @@ -0,0 +1,15 @@ +import pyblish.api +from maya import mel + + +class CollectWorksceneFPS(pyblish.api.ContextPlugin): + """Get the FPS of the work scene""" + + label = "Workscene FPS" + order = pyblish.api.CollectorOrder + hosts = ["maya"] + + def process(self, context): + fps = mel.eval('currentTimeUnitToFPS()') + self.log.info("Workscene FPS: %s" % fps) + context.data.update({"fps": fps}) diff --git a/client/ayon_maya/plugins/publish/collect_xgen.py b/client/ayon_maya/plugins/publish/collect_xgen.py new file mode 100644 index 00000000..d09f60c8 --- /dev/null +++ b/client/ayon_maya/plugins/publish/collect_xgen.py @@ -0,0 +1,71 @@ +import os + +from maya import cmds + +import pyblish.api +from ayon_maya.api.lib import get_attribute_input + + +class CollectXgen(pyblish.api.InstancePlugin): + """Collect Xgen""" + + order = pyblish.api.CollectorOrder + 0.499999 + label = "Collect Xgen" + families = ["xgen"] + + def process(self, instance): + data = { + "xgmPalettes": cmds.ls(instance, type="xgmPalette", long=True), + "xgmDescriptions": cmds.ls( + instance, type="xgmDescription", long=True + ), + "xgmSubdPatches": cmds.ls(instance, type="xgmSubdPatch", long=True) + } + data["xgenNodes"] = ( + data["xgmPalettes"] + + data["xgmDescriptions"] + + data["xgmSubdPatches"] + ) + + if data["xgmPalettes"]: + data["xgmPalette"] = data["xgmPalettes"][0] + + data["xgenConnections"] = set() + for node in data["xgmSubdPatches"]: + connected_transform = get_attribute_input( + node + ".transform" + ).split(".")[0] + data["xgenConnections"].add(connected_transform) + + # Collect all files under palette root as resources. + import xgenm + + data_path = xgenm.getAttr( + "xgDataPath", data["xgmPalette"].replace("|", "") + ).split(os.pathsep)[0] + data_path = data_path.replace( + "${PROJECT}", + xgenm.getAttr("xgProjectPath", data["xgmPalette"].replace("|", "")) + ) + transfers = [] + + # Since we are duplicating this palette when extracting we predict that + # the name will be the basename without namespaces. 
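+        # e.g. a palette node "|char:collection1" (hypothetical name) is
+        # predicted to duplicate as "collection1".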
+ predicted_palette_name = data["xgmPalette"].split(":")[-1] + predicted_palette_name = predicted_palette_name.replace("|", "") + + for root, _, files in os.walk(data_path): + for file in files: + source = os.path.join(root, file).replace("\\", "/") + destination = os.path.join( + instance.data["resourcesDir"], + "collections", + predicted_palette_name, + source.replace(data_path, "")[1:] + ) + transfers.append((source, destination.replace("\\", "/"))) + + data["transfers"] = transfers + + self.log.debug(data) + instance.data.update(data) diff --git a/client/ayon_maya/plugins/publish/collect_yeti_cache.py b/client/ayon_maya/plugins/publish/collect_yeti_cache.py new file mode 100644 index 00000000..fa967082 --- /dev/null +++ b/client/ayon_maya/plugins/publish/collect_yeti_cache.py @@ -0,0 +1,95 @@ +from maya import cmds + +import pyblish.api + +from ayon_maya.api import lib +from ayon_maya.api.yeti import get_yeti_user_variables + + +SETTINGS = { + # Preview + "displayOutput", + "colorR", "colorG", "colorB", + "viewportDensity", + "viewportWidth", + "viewportLength", + # Render attributes + "renderDensity", + "renderWidth", + "renderLength", + "increaseRenderBounds", + "imageSearchPath", + # Pipeline specific + "cbId" +} + + +class CollectYetiCache(pyblish.api.InstancePlugin): + """Collect all information of the Yeti caches + + The information contains the following attributes per Yeti node + + - "renderDensity" + - "renderWidth" + - "renderLength" + - "increaseRenderBounds" + - "imageSearchPath" + + Other information is the name of the transform and its `cbId` + """ + + order = pyblish.api.CollectorOrder + 0.45 + label = "Collect Yeti Cache" + families = ["yetiRig", "yeticache", "yeticacheUE"] + hosts = ["maya"] + + def process(self, instance): + + # Collect fur settings + settings = {"nodes": []} + + # Get yeti nodes and their transforms + yeti_shapes = cmds.ls(instance, type="pgYetiMaya") + for shape in yeti_shapes: + + # Get specific node attributes + attr_data = {} + for attr in SETTINGS: + # Ignore non-existing attributes with a warning, e.g. 
cbId + # if they have not been generated yet + if not cmds.attributeQuery(attr, node=shape, exists=True): + self.log.warning( + "Attribute '{}' not found on Yeti node: {}".format( + attr, shape + ) + ) + continue + + current = cmds.getAttr("%s.%s" % (shape, attr)) + # change None to empty string as Maya doesn't support + # NoneType in attributes + if current is None: + current = "" + attr_data[attr] = current + + # Get user variable attributes + user_variable_attrs = { + attr: lib.get_attribute("{}.{}".format(shape, attr)) + for attr in get_yeti_user_variables(shape) + } + + # Get transform data + parent = cmds.listRelatives(shape, parent=True)[0] + transform_data = {"name": parent, "cbId": lib.get_id(parent)} + + shape_data = { + "transform": transform_data, + "name": shape, + "cbId": lib.get_id(shape), + "attrs": attr_data, + "user_variables": user_variable_attrs + } + + settings["nodes"].append(shape_data) + + instance.data["fursettings"] = settings diff --git a/client/ayon_maya/plugins/publish/collect_yeti_rig.py b/client/ayon_maya/plugins/publish/collect_yeti_rig.py new file mode 100644 index 00000000..95e6afdb --- /dev/null +++ b/client/ayon_maya/plugins/publish/collect_yeti_rig.py @@ -0,0 +1,309 @@ +import os +import re + +from maya import cmds + +import pyblish.api + +from ayon_maya.api import lib +from ayon_core.pipeline.publish import KnownPublishError + + +SETTINGS = {"renderDensity", + "renderWidth", + "renderLength", + "increaseRenderBounds", + "imageSearchPath", + "cbId"} + + +class CollectYetiRig(pyblish.api.InstancePlugin): + """Collect all information of the Yeti Rig""" + + order = pyblish.api.CollectorOrder + 0.4 + label = "Collect Yeti Rig" + families = ["yetiRig"] + hosts = ["maya"] + + def process(self, instance): + + assert "input_SET" in instance.data["setMembers"], ( + "Yeti Rig must have an input_SET") + + input_connections = self.collect_input_connections(instance) + + # Collect any textures if used + yeti_resources = [] + yeti_nodes = cmds.ls(instance[:], type="pgYetiMaya", long=True) + for node in yeti_nodes: + # Get Yeti resources (textures) + resources = self.get_yeti_resources(node) + yeti_resources.extend(resources) + + instance.data["rigsettings"] = {"inputs": input_connections} + + instance.data["resources"] = yeti_resources + + # Force frame range for yeti cache export for the rig + start = cmds.playbackOptions(query=True, animationStartTime=True) + for key in ["frameStart", "frameEnd", + "frameStartHandle", "frameEndHandle"]: + instance.data[key] = start + instance.data["preroll"] = 0 + + def collect_input_connections(self, instance): + """Collect the inputs for all nodes in the input_SET""" + + # Get the input meshes information + input_content = cmds.ls(cmds.sets("input_SET", query=True), long=True) + + # Include children + input_content += cmds.listRelatives(input_content, + allDescendents=True, + fullPath=True) or [] + + # Ignore intermediate objects + input_content = cmds.ls(input_content, long=True, noIntermediate=True) + if not input_content: + return [] + + # Store all connections + connections = cmds.listConnections(input_content, + source=True, + destination=False, + connections=True, + # Only allow inputs from dagNodes + # (avoid display layers, etc.) 
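+                                           # `connections=True` returns the
+                                           # connected plug pairs and
+                                           # `plugs=True` returns attribute
+                                           # plugs rather than node names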
+ type="dagNode", + plugs=True) or [] + connections = cmds.ls(connections, long=True) # Ensure long names + + inputs = [] + for dest, src in lib.pairwise(connections): + source_node, source_attr = src.split(".", 1) + dest_node, dest_attr = dest.split(".", 1) + + # Ensure the source of the connection is not included in the + # current instance's hierarchy. If so, we ignore that connection + # as we will want to preserve it even over a publish. + if source_node in instance: + self.log.debug("Ignoring input connection between nodes " + "inside the instance: %s -> %s" % (src, dest)) + continue + + inputs.append({"connections": [source_attr, dest_attr], + "sourceID": lib.get_id(source_node), + "destinationID": lib.get_id(dest_node)}) + + return inputs + + def get_yeti_resources(self, node): + """Get all resource file paths + + If a texture is a sequence it gathers all sibling files to ensure + the texture sequence is complete. + + References can be used in the Yeti graph, this means that it is + possible to load previously caches files. The information will need + to be stored and, if the file not publish, copied to the resource + folder. + + Args: + node (str): node name of the pgYetiMaya node + + Returns: + list + """ + resources = [] + + image_search_paths = cmds.getAttr("{}.imageSearchPath".format(node)) + if image_search_paths: + + # TODO: Somehow this uses OS environment path separator, `:` vs `;` + # Later on check whether this is pipeline OS cross-compatible. + image_search_paths = [p for p in + image_search_paths.split(os.path.pathsep) if p] + + # find all ${TOKEN} tokens and replace them with $TOKEN env. variable + image_search_paths = self._replace_tokens(image_search_paths) + + # List all related textures + texture_nodes = cmds.pgYetiGraph( + node, listNodes=True, type="texture") + texture_filenames = [ + cmds.pgYetiGraph( + node, node=texture_node, + param="file_name", getParamValue=True) + for texture_node in texture_nodes + ] + self.log.debug("Found %i texture(s)" % len(texture_filenames)) + + # Get all reference nodes + reference_nodes = cmds.pgYetiGraph(node, + listNodes=True, + type="reference") + self.log.debug("Found %i reference node(s)" % len(reference_nodes)) + + # Collect all texture files + # find all ${TOKEN} tokens and replace them with $TOKEN env. variable + texture_filenames = self._replace_tokens(texture_filenames) + for texture in texture_filenames: + + files = [] + if os.path.isabs(texture): + self.log.debug("Texture is absolute path, ignoring " + "image search paths for: %s" % texture) + files = self.search_textures(texture) + else: + for root in image_search_paths: + filepath = os.path.join(root, texture) + files = self.search_textures(filepath) + if files: + # Break out on first match in search paths.. + break + + if not files: + raise KnownPublishError( + "No texture found for: %s " + "(searched: %s)" % (texture, image_search_paths)) + + item = { + "files": files, + "source": texture, + "node": node + } + + resources.append(item) + + # For now validate that every texture has at least a single file + # resolved. Since a 'resource' does not have the requirement of having + # a `files` explicitly mapped it's not explicitly validated. 
+        # TODO: Validate this as a validator
+        invalid_resources = []
+        for resource in resources:
+            if not resource['files']:
+                invalid_resources.append(resource)
+        if invalid_resources:
+            raise RuntimeError("Invalid resources: {}".format(invalid_resources))
+
+        # Collect all referenced files
+        for reference_node in reference_nodes:
+            ref_file = cmds.pgYetiGraph(node,
+                                        node=reference_node,
+                                        param="reference_file",
+                                        getParamValue=True)
+
+            # Create resource dict
+            item = {
+                "source": ref_file,
+                "node": node,
+                "graphnode": reference_node,
+                "param": "reference_file",
+                "files": []
+            }
+
+            ref_file_name = os.path.basename(ref_file)
+            if "%04d" in ref_file_name:
+                item["files"] = self.get_sequence(ref_file)
+            else:
+                if os.path.exists(ref_file) and os.path.isfile(ref_file):
+                    item["files"] = [ref_file]
+
+            if not item["files"]:
+                self.log.warning("Reference node '%s' has no valid file "
+                                 "path set: %s" % (reference_node, ref_file))
+                # TODO: This should pass here and fail in a Validator instead
+                raise RuntimeError("Reference node must be a full file path!")
+
+            resources.append(item)
+
+        return resources
+
+    def search_textures(self, filepath):
+        """Search all texture files on disk.
+
+        This also expands dynamic patterns in the filename, such as
+        <UDIM> and %04d, to the full sequence of files on disk.
+
+        Args:
+            filepath (str): The full path to the file, including any
+                dynamic patterns like <UDIM> or %04d
+
+        Returns:
+            list: The files found on disk
+
+        """
+        filename = os.path.basename(filepath)
+
+        # Collect full sequence if it matches a sequence pattern
+        if len(filename.split(".")) > 2:
+
+            # For UDIM based textures (tiles)
+            if "<UDIM>" in filename:
+                sequences = self.get_sequence(filepath,
+                                              pattern="<UDIM>")
+                if sequences:
+                    return sequences
+
+            # Frame/time-based textures (e.g. animated masks)
+            elif "%04d" in filename:
+                sequences = self.get_sequence(filepath,
+                                              pattern="%04d")
+                if sequences:
+                    return sequences
+
+        # Assuming it is a fixed name (single file)
+        if os.path.exists(filepath):
+            return [filepath]
+
+        return []
+
+    def get_sequence(self, filepath, pattern="%04d"):
+        """Get sequence from filename.
+
+        This will only return files if they exist on disk as it tries
+        to collect the sequence using the filename pattern and searching
+        for them on disk.
+
+        Supports negative frame ranges like -001, 0000, 0001 and -0001,
+        0000, 0001.
+
+        Arguments:
+            filepath (str): The full path to filename containing the given
+                pattern.
+            pattern (str): The pattern to swap with the variable frame number.
+
+        Returns:
+            list: Sequence collections found on disk (clique).
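+
+        Example:
+            >>> # Hypothetical frames on disk: fx.0001.exr .. fx.0003.exr
+            >>> self.get_sequence("/cache/fx.%04d.exr")  # doctest: +SKIP
+            [<Collection "fx.%04d.exr [1-3]">]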
+
+        """
+        import clique
+
+        # Match against basenames, since os.listdir() returns basenames
+        escaped = re.escape(os.path.basename(filepath))
+        re_pattern = escaped.replace(pattern, "-?[0-9]+")
+
+        source_dir = os.path.dirname(filepath)
+        files = [f for f in os.listdir(source_dir)
+                 if re.match(re_pattern, f)]
+
+        pattern = [clique.PATTERNS["frames"]]
+        # clique.assemble returns ([Collection, ...], remainder)
+        collection, remainder = clique.assemble(files, patterns=pattern)
+
+        return collection
+
+    def _replace_tokens(self, strings):
+        env_re = re.compile(r"\$\{(\w+)\}")
+
+        replaced = []
+        for s in strings:
+            matches = re.finditer(env_re, s)
+            for m in matches:
+                try:
+                    s = s.replace(m.group(), os.environ[m.group(1)])
+                except KeyError:
+                    msg = "Cannot find requested {} in environment".format(
+                        m.group(1))
+                    self.log.error(msg)
+                    raise RuntimeError(msg)
+            replaced.append(s)
+        return replaced
diff --git a/client/ayon_maya/plugins/publish/determine_future_version.py b/client/ayon_maya/plugins/publish/determine_future_version.py
new file mode 100644
index 00000000..5b597f27
--- /dev/null
+++ b/client/ayon_maya/plugins/publish/determine_future_version.py
@@ -0,0 +1,36 @@
+import pyblish.api
+
+
+class DetermineFutureVersion(pyblish.api.InstancePlugin):
+    """
+    Determine the version of the product that a render should be attached to.
+    """
+    label = "Determine Product Version"
+    order = pyblish.api.IntegratorOrder
+    hosts = ["maya"]
+    families = ["renderlayer"]
+
+    def process(self, instance):
+        context = instance.context
+        attach_to_products = [
+            i["productName"]
+            for i in instance.data["attachTo"]
+        ]
+        if not attach_to_products:
+            return
+
+        for i in context:
+            if i.data["productName"] not in attach_to_products:
+                continue
+            # This gets the corresponding product in the attachTo list
+            # so we can set the version there
+            sub = next(
+                item
+                for item in instance.data["attachTo"]
+                if item["productName"] == i.data["productName"]
+            )
+
+            sub["version"] = i.data.get("version", 1)
+            self.log.info("render will be attached to {} v{}".format(
+                sub["productName"], sub["version"]
+            ))
diff --git a/client/ayon_maya/plugins/publish/extract_active_view_thumbnail.py b/client/ayon_maya/plugins/publish/extract_active_view_thumbnail.py
new file mode 100644
index 00000000..777bc200
--- /dev/null
+++ b/client/ayon_maya/plugins/publish/extract_active_view_thumbnail.py
@@ -0,0 +1,60 @@
+import maya.api.OpenMaya as om
+import maya.api.OpenMayaUI as omui
+
+import pyblish.api
+import tempfile
+
+from ayon_maya.api.lib import IS_HEADLESS
+
+
+class ExtractActiveViewThumbnail(pyblish.api.InstancePlugin):
+    """Set instance thumbnail to a screengrab of current active viewport.
+
+    If an instance does not have a thumbnail set yet, it will get a
+    thumbnail of the currently active view at the time of publishing
+    as a fallback.
+
+    """
+    order = pyblish.api.ExtractorOrder + 0.49
+    label = "Active View Thumbnail"
+    families = ["workfile"]
+    hosts = ["maya"]
+
+    def process(self, instance):
+        if IS_HEADLESS:
+            self.log.debug(
+                "Skip extraction of active view thumbnail, due to being in "
+                "headless mode."
+ ) + return + + thumbnail = instance.data.get("thumbnailPath") + if not thumbnail: + view_thumbnail = self.get_view_thumbnail(instance) + if not view_thumbnail: + return + + self.log.debug("Setting instance thumbnail path to: {}".format( + view_thumbnail + )) + instance.data["thumbnailPath"] = view_thumbnail + + def get_view_thumbnail(self, instance): + cache_key = "__maya_view_thumbnail" + context = instance.context + + if cache_key not in context.data: + # Generate only a single thumbnail, even for multiple instances + with tempfile.NamedTemporaryFile(suffix="_thumbnail.jpg", + delete=False) as f: + path = f.name + + view = omui.M3dView.active3dView() + image = om.MImage() + view.readColorBuffer(image, True) + image.writeToFile(path, "jpg") + self.log.debug("Generated thumbnail: {}".format(path)) + + context.data["cleanupFullPaths"].append(path) + context.data[cache_key] = path + return context.data[cache_key] diff --git a/client/ayon_maya/plugins/publish/extract_arnold_scene_source.py b/client/ayon_maya/plugins/publish/extract_arnold_scene_source.py new file mode 100644 index 00000000..2829420a --- /dev/null +++ b/client/ayon_maya/plugins/publish/extract_arnold_scene_source.py @@ -0,0 +1,247 @@ +import os +from collections import defaultdict +import json + +from maya import cmds +import arnold + +from ayon_core.pipeline import publish +from ayon_maya.api import lib + + +class ExtractArnoldSceneSource(publish.Extractor): + """Extract the content of the instance to an Arnold Scene Source file.""" + + label = "Extract Arnold Scene Source" + hosts = ["maya"] + families = ["ass"] + asciiAss = False + + def _pre_process(self, instance, staging_dir): + file_path = os.path.join(staging_dir, "{}.ass".format(instance.name)) + + # Mask + mask = arnold.AI_NODE_ALL + + node_types = { + "options": arnold.AI_NODE_OPTIONS, + "camera": arnold.AI_NODE_CAMERA, + "light": arnold.AI_NODE_LIGHT, + "shape": arnold.AI_NODE_SHAPE, + "shader": arnold.AI_NODE_SHADER, + "override": arnold.AI_NODE_OVERRIDE, + "driver": arnold.AI_NODE_DRIVER, + "filter": arnold.AI_NODE_FILTER, + "color_manager": arnold.AI_NODE_COLOR_MANAGER, + "operator": arnold.AI_NODE_OPERATOR + } + + for key in node_types.keys(): + if instance.data.get("mask" + key.title()): + mask = mask ^ node_types[key] + + # Motion blur + attribute_data = { + "defaultArnoldRenderOptions.motion_blur_enable": instance.data.get( + "motionBlur", True + ), + "defaultArnoldRenderOptions.motion_steps": instance.data.get( + "motionBlurKeys", 2 + ), + "defaultArnoldRenderOptions.motion_frames": instance.data.get( + "motionBlurLength", 0.5 + ) + } + + # Write out .ass file + kwargs = { + "filename": file_path, + "startFrame": instance.data.get("frameStartHandle", 1), + "endFrame": instance.data.get("frameEndHandle", 1), + "frameStep": instance.data.get("step", 1), + "selected": True, + "asciiAss": self.asciiAss, + "shadowLinks": True, + "lightLinks": True, + "boundingBox": True, + "expandProcedurals": instance.data.get("expandProcedurals", False), + "camera": instance.data["camera"], + "mask": mask + } + + if "representations" not in instance.data: + instance.data["representations"] = [] + + return attribute_data, kwargs + + def process(self, instance): + staging_dir = self.staging_dir(instance) + attribute_data, kwargs = self._pre_process(instance, staging_dir) + + filenames = self._extract( + instance.data["members"], attribute_data, kwargs + ) + + self._post_process( + instance, filenames, staging_dir, kwargs["startFrame"] + ) + + def _post_process(self, instance, 
filenames, staging_dir, frame_start): + nodes_by_id = self._nodes_by_id(instance[:]) + representation = { + "name": "ass", + "ext": "ass", + "files": filenames if len(filenames) > 1 else filenames[0], + "stagingDir": staging_dir, + "frameStart": frame_start + } + + instance.data["representations"].append(representation) + + json_path = os.path.join( + staging_dir, "{}.json".format(instance.name) + ) + with open(json_path, "w") as f: + json.dump(nodes_by_id, f) + + representation = { + "name": "json", + "ext": "json", + "files": os.path.basename(json_path), + "stagingDir": staging_dir + } + + instance.data["representations"].append(representation) + + self.log.debug( + "Extracted instance {} to: {}".format(instance.name, staging_dir) + ) + + def _nodes_by_id(self, nodes): + nodes_by_id = defaultdict(list) + + for node in nodes: + id = lib.get_id(node) + + if id is None: + continue + + # Converting Maya hierarchy separator "|" to Arnold separator "/". + nodes_by_id[id].append(node.replace("|", "/")) + + return nodes_by_id + + def _extract(self, nodes, attribute_data, kwargs): + filenames = [] + with lib.attribute_values(attribute_data): + with lib.maintained_selection(): + self.log.debug( + "Writing: {}".format(nodes) + ) + cmds.select(nodes, noExpand=True) + + self.log.debug( + "Extracting ass sequence with: {}".format(kwargs) + ) + + exported_files = cmds.arnoldExportAss(**kwargs) + + for file in exported_files: + filenames.append(os.path.split(file)[1]) + + self.log.debug("Exported: {}".format(filenames)) + + return filenames + + +class ExtractArnoldSceneSourceProxy(ExtractArnoldSceneSource): + """Extract the content of the instance to an Arnold Scene Source file.""" + + label = "Extract Arnold Scene Source Proxy" + hosts = ["maya"] + families = ["assProxy"] + asciiAss = True + + def process(self, instance): + staging_dir = self.staging_dir(instance) + attribute_data, kwargs = self._pre_process(instance, staging_dir) + + filenames, _ = self._duplicate_extract( + instance.data["members"], attribute_data, kwargs + ) + + self._post_process( + instance, filenames, staging_dir, kwargs["startFrame"] + ) + + kwargs["filename"] = os.path.join( + staging_dir, "{}_proxy.ass".format(instance.name) + ) + + filenames, _ = self._duplicate_extract( + instance.data["proxy"], attribute_data, kwargs + ) + + representation = { + "name": "proxy", + "ext": "ass", + "files": filenames if len(filenames) > 1 else filenames[0], + "stagingDir": staging_dir, + "frameStart": kwargs["startFrame"], + "outputName": "proxy" + } + + instance.data["representations"].append(representation) + + def _duplicate_extract(self, nodes, attribute_data, kwargs): + self.log.debug( + "Writing {} with:\n{}".format(kwargs["filename"], kwargs) + ) + filenames = [] + # Duplicating nodes so they are direct children of the world. This + # makes the hierarchy of any exported ass file the same. + with lib.delete_after() as delete_bin: + duplicate_nodes = [] + for node in nodes: + # Only interested in transforms: + if cmds.nodeType(node) != "transform": + continue + + # Only interested in transforms with shapes. 
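+                # e.g. "|grp|char:body" gets duplicated, parented under the
+                # world and renamed to its namespace-less leaf name "body",
+                # keeping the exported .ass hierarchy flat and predictable.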
+ shapes = cmds.listRelatives( + node, shapes=True, noIntermediate=True + ) + if not shapes: + continue + + basename = cmds.duplicate(node)[0] + parents = cmds.ls(node, long=True)[0].split("|")[:-1] + duplicate_transform = "|".join(parents + [basename]) + + if cmds.listRelatives(duplicate_transform, parent=True): + duplicate_transform = cmds.parent( + duplicate_transform, world=True + )[0] + + basename = node.rsplit("|", 1)[-1].rsplit(":", 1)[-1] + duplicate_transform = cmds.rename( + duplicate_transform, basename + ) + + # Discard children nodes that are not shapes + shapes = cmds.listRelatives( + duplicate_transform, shapes=True, fullPath=True + ) + children = cmds.listRelatives( + duplicate_transform, children=True, fullPath=True + ) + cmds.delete(set(children) - set(shapes)) + + duplicate_nodes.append(duplicate_transform) + duplicate_nodes.extend(shapes) + delete_bin.append(duplicate_transform) + + nodes_by_id = self._nodes_by_id(duplicate_nodes) + filenames = self._extract(duplicate_nodes, attribute_data, kwargs) + + return filenames, nodes_by_id diff --git a/client/ayon_maya/plugins/publish/extract_assembly.py b/client/ayon_maya/plugins/publish/extract_assembly.py new file mode 100644 index 00000000..df390c07 --- /dev/null +++ b/client/ayon_maya/plugins/publish/extract_assembly.py @@ -0,0 +1,69 @@ +import os +import json + +from ayon_core.pipeline import publish +from ayon_maya.api.alembic import extract_alembic + +from maya import cmds + + +class ExtractAssembly(publish.Extractor): + """Produce an alembic of just point positions and normals. + + Positions and normals are preserved, but nothing more, + for plain and predictable point caches. + + """ + + label = "Extract Assembly" + hosts = ["maya"] + families = ["assembly"] + + def process(self, instance): + + staging_dir = self.staging_dir(instance) + hierarchy_filename = "{}.abc".format(instance.name) + hierarchy_path = os.path.join(staging_dir, hierarchy_filename) + json_filename = "{}.json".format(instance.name) + json_path = os.path.join(staging_dir, json_filename) + + self.log.debug("Dumping scene data for debugging ..") + with open(json_path, "w") as filepath: + json.dump(instance.data["scenedata"], filepath, ensure_ascii=False) + + self.log.debug("Extracting pointcache ..") + cmds.select(instance.data["nodesHierarchy"]) + + # Run basic alembic exporter + extract_alembic(file=hierarchy_path, + startFrame=1.0, + endFrame=1.0, + **{"step": 1.0, + "attr": ["cbId"], + "writeVisibility": True, + "writeCreases": True, + "uvWrite": True, + "selection": True}) + + if "representations" not in instance.data: + instance.data["representations"] = [] + + representation_abc = { + 'name': 'abc', + 'ext': 'abc', + 'files': hierarchy_filename, + "stagingDir": staging_dir + } + instance.data["representations"].append(representation_abc) + + representation_json = { + 'name': 'json', + 'ext': 'json', + 'files': json_filename, + "stagingDir": staging_dir + } + instance.data["representations"].append(representation_json) + # Remove data + instance.data.pop("scenedata", None) + + cmds.select(clear=True) diff --git a/client/ayon_maya/plugins/publish/extract_camera_alembic.py b/client/ayon_maya/plugins/publish/extract_camera_alembic.py new file mode 100644 index 00000000..e36f964a --- /dev/null +++ b/client/ayon_maya/plugins/publish/extract_camera_alembic.py @@ -0,0 +1,124 @@ +import os +import json + +from maya import cmds + +from ayon_core.pipeline import publish +from ayon_maya.api import lib + + +class ExtractCameraAlembic(publish.Extractor, + 
publish.OptionalPyblishPluginMixin): + """Extract a Camera as Alembic. + + The camera gets baked to world space by default. Only when the instance's + `bakeToWorldSpace` is set to False it will include its full hierarchy. + + 'camera' product type expects only single camera, if multiple cameras + are needed, 'matchmove' is better choice. + + """ + + label = "Extract Camera (Alembic)" + hosts = ["maya"] + families = ["camera", "matchmove"] + bake_attributes = "[]" + + def process(self, instance): + + # Collect the start and end including handles + start = instance.data["frameStartHandle"] + end = instance.data["frameEndHandle"] + + step = instance.data.get("step", 1.0) + bake_to_worldspace = instance.data("bakeToWorldSpace", True) + + # get cameras + members = instance.data['setMembers'] + cameras = cmds.ls(members, leaf=True, long=True, + dag=True, type="camera") + + # validate required settings + assert isinstance(step, float), "Step must be a float value" + + # Define extract output file path + dir_path = self.staging_dir(instance) + if not os.path.exists(dir_path): + os.makedirs(dir_path) + filename = "{0}.abc".format(instance.name) + path = os.path.join(dir_path, filename) + + # Perform alembic extraction + member_shapes = cmds.ls( + members, leaf=True, shapes=True, long=True, dag=True) + with lib.maintained_selection(): + cmds.select( + member_shapes, + replace=True, noExpand=True) + + # Enforce forward slashes for AbcExport because we're + # embedding it into a job string + path = path.replace("\\", "/") + + job_str = ' -selection -dataFormat "ogawa" ' + job_str += ' -attrPrefix cb' + job_str += ' -frameRange {0} {1} '.format(start, end) + job_str += ' -step {0} '.format(step) + + if bake_to_worldspace: + job_str += ' -worldSpace' + + # if baked, drop the camera hierarchy to maintain + # clean output and backwards compatibility + camera_roots = cmds.listRelatives( + cameras, parent=True, fullPath=True) + for camera_root in camera_roots: + job_str += ' -root {0}'.format(camera_root) + + for member in members: + descendants = cmds.listRelatives(member, + allDescendents=True, + fullPath=True) or [] + shapes = cmds.ls(descendants, shapes=True, + noIntermediate=True, long=True) + cameras = cmds.ls(shapes, type="camera", long=True) + if cameras: + if not set(shapes) - set(cameras): + continue + self.log.warning(( + "Camera hierarchy contains additional geometry. 
" + "Extraction will fail.") + ) + transform = cmds.listRelatives( + member, parent=True, fullPath=True) + transform = transform[0] if transform else member + job_str += ' -root {0}'.format(transform) + + job_str += ' -file "{0}"'.format(path) + + bake_attributes = json.loads(self.bake_attributes) + # bake specified attributes in preset + assert isinstance(bake_attributes, list), ( + "Attributes to bake must be specified as a list" + ) + for attr in bake_attributes: + self.log.debug("Adding {} attribute".format(attr)) + job_str += " -attr {0}".format(attr) + + with lib.evaluation("off"): + with lib.suspended_refresh(): + cmds.AbcExport(j=job_str, verbose=False) + + if "representations" not in instance.data: + instance.data["representations"] = [] + + representation = { + 'name': 'abc', + 'ext': 'abc', + 'files': filename, + "stagingDir": dir_path, + } + instance.data["representations"].append(representation) + + self.log.debug("Extracted instance '{0}' to: {1}".format( + instance.name, path)) diff --git a/client/ayon_maya/plugins/publish/extract_camera_mayaScene.py b/client/ayon_maya/plugins/publish/extract_camera_mayaScene.py new file mode 100644 index 00000000..62ce0a18 --- /dev/null +++ b/client/ayon_maya/plugins/publish/extract_camera_mayaScene.py @@ -0,0 +1,308 @@ +# -*- coding: utf-8 -*- +"""Extract camera as Maya Scene.""" +import os +import itertools +import contextlib + +from maya import cmds + +from ayon_core.pipeline import publish +from ayon_maya.api import lib +from ayon_core.lib import ( + BoolDef +) + + +def massage_ma_file(path): + """Clean up .ma file for backwards compatibility. + + Massage the .ma of baked camera to stay + backwards compatible with older versions + of Fusion (6.4) + + """ + # Get open file's lines + f = open(path, "r+") + lines = f.readlines() + f.seek(0) # reset to start of file + + # Rewrite the file + for line in lines: + # Skip all 'rename -uid' lines + stripped = line.strip() + if stripped.startswith("rename -uid "): + continue + + f.write(line) + + f.truncate() # remove remainder + f.close() + + +def grouper(iterable, n, fillvalue=None): + """Collect data into fixed-length chunks or blocks. + + Examples: + grouper('ABCDEFG', 3, 'x') --> ABC DEF Gxx + + """ + args = [iter(iterable)] * n + from six.moves import zip_longest + return zip_longest(fillvalue=fillvalue, *args) + + +def unlock(plug): + """Unlocks attribute and disconnects inputs for a plug. + + This will also recursively unlock the attribute + upwards to any parent attributes for compound + attributes, to ensure it's fully unlocked and free + to change the value. + + """ + node, attr = plug.rsplit(".", 1) + + # Unlock attribute + cmds.setAttr(plug, lock=False) + + # Also unlock any parent attribute (if compound) + parents = cmds.attributeQuery(attr, node=node, listParent=True) + if parents: + for parent in parents: + unlock("{0}.{1}".format(node, parent)) + + # Break incoming connections + connections = cmds.listConnections(plug, + source=True, + destination=False, + plugs=True, + connections=True) + if connections: + for destination, source in grouper(connections, 2): + cmds.disconnectAttr(source, destination) + + +class ExtractCameraMayaScene(publish.Extractor, + publish.OptionalPyblishPluginMixin): + """Extract a Camera as Maya Scene. + + This will create a duplicate of the camera that will be baked *with* + substeps and handles for the required frames. This temporary duplicate + will be published. + + The cameras gets baked to world space by default. 
Only when the instance's + `bakeToWorldSpace` is set to False it will include its full hierarchy. + + 'camera' product type expects only single camera, if multiple cameras are + needed, 'matchmove' is better choice. + + Note: + The extracted Maya ascii file gets "massaged" removing the uuid values + so they are valid for older versions of Fusion (e.g. 6.4) + + """ + + label = "Extract Camera (Maya Scene)" + hosts = ["maya"] + families = ["camera", "matchmove"] + scene_type = "ma" + + keep_image_planes = True + + def process(self, instance): + """Plugin entry point.""" + # get settings + maya_settings = instance.context.data["project_settings"]["maya"] + ext_mapping = { + item["name"]: item["value"] + for item in maya_settings["ext_mapping"] + } + if ext_mapping: + self.log.debug("Looking in settings for scene type ...") + # use extension mapping for first family found + for family in self.families: + try: + self.scene_type = ext_mapping[family] + self.log.debug( + "Using {} as scene type".format(self.scene_type)) + break + except KeyError: + # no preset found + pass + + # Collect the start and end including handles + start = instance.data["frameStartHandle"] + end = instance.data["frameEndHandle"] + + step = instance.data.get("step", 1.0) + bake_to_worldspace = instance.data("bakeToWorldSpace", True) + + if not bake_to_worldspace: + self.log.warning("Camera (Maya Scene) export only supports world" + "space baked camera extractions. The disabled " + "bake to world space is ignored...") + + # get cameras + members = set(cmds.ls(instance.data['setMembers'], leaf=True, + shapes=True, long=True, dag=True)) + cameras = set(cmds.ls(members, leaf=True, shapes=True, long=True, + dag=True, type="camera")) + + # validate required settings + assert isinstance(step, float), "Step must be a float value" + transforms = cmds.listRelatives(list(cameras), + parent=True, fullPath=True) + + # Define extract output file path + dir_path = self.staging_dir(instance) + filename = "{0}.{1}".format(instance.name, self.scene_type) + path = os.path.join(dir_path, filename) + + # Perform extraction + with lib.maintained_selection(): + with lib.evaluation("off"): + with lib.suspended_refresh(): + if bake_to_worldspace: + baked = lib.bake_to_world_space( + transforms, + frame_range=[start, end], + step=step + ) + baked_camera_shapes = set(cmds.ls(baked, + type="camera", + dag=True, + shapes=True, + long=True)) + + members.update(baked_camera_shapes) + members.difference_update(cameras) + else: + baked_camera_shapes = cmds.ls(list(cameras), + type="camera", + dag=True, + shapes=True, + long=True) + + attrs = {"backgroundColorR": 0.0, + "backgroundColorG": 0.0, + "backgroundColorB": 0.0, + "overscan": 1.0} + + # Fix PLN-178: Don't allow background color to be non-black + for cam, (attr, value) in itertools.product(cmds.ls( + baked_camera_shapes, type="camera", dag=True, + long=True), attrs.items()): + plug = "{0}.{1}".format(cam, attr) + unlock(plug) + cmds.setAttr(plug, value) + + attr_values = self.get_attr_values_from_data( + instance.data) + keep_image_planes = attr_values.get("keep_image_planes") + + with transfer_image_planes(sorted(cameras), + sorted(baked_camera_shapes), + keep_image_planes): + + self.log.info("Performing extraction..") + cmds.select(cmds.ls(list(members), dag=True, + shapes=True, long=True), + noExpand=True) + cmds.file(path, + force=True, + typ="mayaAscii" if self.scene_type == "ma" else "mayaBinary", # noqa: E501 + exportSelected=True, + preserveReferences=False, + constructionHistory=False, + 
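+                                  # Only animation channels are kept below;
+                                  # references, history, constraints, shading
+                                  # and expressions are stripped so the
+                                  # exported camera file is self-contained.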
channels=True, # allow animation + constraints=False, + shader=False, + expressions=False) + + # Delete the baked hierarchy + if bake_to_worldspace: + cmds.delete(baked) + if self.scene_type == "ma": + massage_ma_file(path) + + if "representations" not in instance.data: + instance.data["representations"] = [] + + representation = { + 'name': self.scene_type, + 'ext': self.scene_type, + 'files': filename, + "stagingDir": dir_path, + } + instance.data["representations"].append(representation) + + self.log.debug("Extracted instance '{0}' to: {1}".format( + instance.name, path)) + + @classmethod + def get_attribute_defs(cls): + defs = super(ExtractCameraMayaScene, cls).get_attribute_defs() + + defs.extend([ + BoolDef("keep_image_planes", + label="Keep Image Planes", + tooltip="Preserving connected image planes on camera", + default=cls.keep_image_planes), + + ]) + + return defs + + +@contextlib.contextmanager +def transfer_image_planes(source_cameras, target_cameras, + keep_input_connections): + """Reattaches image planes to baked or original cameras. + + Baked cameras are duplicates of original ones. + This attaches it to duplicated camera properly and after + export it reattaches it back to original to keep image plane in workfile. + """ + originals = {} + try: + for source_camera, target_camera in zip(source_cameras, + target_cameras): + image_plane_plug = "{}.imagePlane".format(source_camera) + image_planes = cmds.listConnections(image_plane_plug, + source=True, + destination=False, + type="imagePlane") or [] + + # Split of the parent path they are attached - we want + # the image plane node name if attached to a camera. + # TODO: Does this still mean the image plane name is unique? + image_planes = [x.split("->", 1)[-1] for x in image_planes] + + if not image_planes: + continue + + originals[source_camera] = [] + for image_plane in image_planes: + if keep_input_connections: + if source_camera == target_camera: + continue + _attach_image_plane(target_camera, image_plane) + else: # explicitly detach image planes + cmds.imagePlane(image_plane, edit=True, detach=True) + originals[source_camera].append(image_plane) + yield + finally: + for camera, image_planes in originals.items(): + for image_plane in image_planes: + _attach_image_plane(camera, image_plane) + + +def _attach_image_plane(camera, image_plane): + cmds.imagePlane(image_plane, edit=True, detach=True) + + # Attaching to a camera resets it to identity size, so we counter that + size_x = cmds.getAttr(f"{image_plane}.sizeX") + size_y = cmds.getAttr(f"{image_plane}.sizeY") + cmds.imagePlane(image_plane, edit=True, camera=camera) + cmds.setAttr(f"{image_plane}.sizeX", size_x) + cmds.setAttr(f"{image_plane}.sizeY", size_y) diff --git a/client/ayon_maya/plugins/publish/extract_fbx.py b/client/ayon_maya/plugins/publish/extract_fbx.py new file mode 100644 index 00000000..d6a1de84 --- /dev/null +++ b/client/ayon_maya/plugins/publish/extract_fbx.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +import os + +from maya import cmds # noqa +import maya.mel as mel # noqa +import pyblish.api + +from ayon_core.pipeline import publish +from ayon_maya.api.lib import maintained_selection +from ayon_maya.api import fbx + + +class ExtractFBX(publish.Extractor): + """Extract FBX from Maya. + + This extracts reproducible FBX exports ignoring any of the + settings set on the local machine in the FBX export options window. 
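+
+    All export flags are driven by the instance data through
+    `fbx.FBXExtractor.set_options_from_instance()`, which is what makes
+    the output reproducible across machines.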
+
+    """
+    order = pyblish.api.ExtractorOrder
+    label = "Extract FBX"
+    families = ["fbx"]
+
+    def process(self, instance):
+        fbx_exporter = fbx.FBXExtractor(log=self.log)
+
+        # Define output path
+        staging_dir = self.staging_dir(instance)
+        filename = "{0}.fbx".format(instance.name)
+        path = os.path.join(staging_dir, filename)
+
+        # The export requires forward slashes because we need
+        # to format it into a string in a mel expression
+        path = path.replace('\\', '/')
+
+        self.log.debug("Extracting FBX to: {0}".format(path))
+
+        members = instance.data["setMembers"]
+        self.log.debug("Members: {0}".format(members))
+        self.log.debug("Instance: {0}".format(instance[:]))
+
+        fbx_exporter.set_options_from_instance(instance)
+
+        # Export
+        with maintained_selection():
+            fbx_exporter.export(members, path)
+            cmds.select(members, r=1, noExpand=True)
+            mel.eval('FBXExport -f "{}" -s'.format(path))
+
+        if "representations" not in instance.data:
+            instance.data["representations"] = []
+
+        representation = {
+            'name': 'fbx',
+            'ext': 'fbx',
+            'files': filename,
+            "stagingDir": staging_dir,
+        }
+        instance.data["representations"].append(representation)
+
+        self.log.debug("Extracted FBX successfully to: {0}".format(path))
diff --git a/client/ayon_maya/plugins/publish/extract_fbx_animation.py b/client/ayon_maya/plugins/publish/extract_fbx_animation.py
new file mode 100644
index 00000000..92ba0be9
--- /dev/null
+++ b/client/ayon_maya/plugins/publish/extract_fbx_animation.py
@@ -0,0 +1,76 @@
+# -*- coding: utf-8 -*-
+import os
+
+from maya import cmds  # noqa
+import pyblish.api
+
+from ayon_core.pipeline import publish
+from ayon_maya.api import fbx
+from ayon_maya.api.lib import (
+    namespaced, get_namespace, strip_namespace
+)
+
+
+class ExtractFBXAnimation(publish.Extractor):
+    """Extract Rig in FBX format from Maya.
+
+    This extracts the rig in fbx with the constraints
+    and referenced asset content included.
+    This also optionally extracts the animated rig in fbx with
+    geometries included.
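+
+    The export happens from within the rig's namespace (see `namespaced`
+    in `process`), so node names in the FBX file match the names in the
+    original rig workfile rather than the namespaced scene.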
+ + """ + order = pyblish.api.ExtractorOrder + label = "Extract Animation (FBX)" + hosts = ["maya"] + families = ["animation.fbx"] + + def process(self, instance): + # Define output path + staging_dir = self.staging_dir(instance) + filename = "{0}.fbx".format(instance.name) + path = os.path.join(staging_dir, filename) + path = path.replace("\\", "/") + + fbx_exporter = fbx.FBXExtractor(log=self.log) + out_members = instance.data.get("animated_skeleton", []) + # Export + # TODO: need to set up the options for users to set up + # the flags they intended to export + instance.data["skeletonDefinitions"] = True + instance.data["referencedAssetsContent"] = True + fbx_exporter.set_options_from_instance(instance) + # Export from the rig's namespace so that the exported + # FBX does not include the namespace but preserves the node + # names as existing in the rig workfile + if not out_members: + skeleton_set = [ + i for i in instance + if i.endswith("skeletonAnim_SET") + ] + self.log.debug( + "Top group of animated skeleton not found in " + "{}.\nSkipping fbx animation extraction.".format(skeleton_set)) + return + + namespace = get_namespace(out_members[0]) + relative_out_members = [ + strip_namespace(node, namespace) for node in out_members + ] + with namespaced( + ":" + namespace, + new=False, + relative_names=True + ) as namespace: + fbx_exporter.export(relative_out_members, path) + + representations = instance.data.setdefault("representations", []) + representations.append({ + 'name': 'fbx', + 'ext': 'fbx', + 'files': filename, + "stagingDir": staging_dir + }) + + self.log.debug( + "Extracted FBX animation to: {0}".format(path)) diff --git a/client/ayon_maya/plugins/publish/extract_gltf.py b/client/ayon_maya/plugins/publish/extract_gltf.py new file mode 100644 index 00000000..1472454a --- /dev/null +++ b/client/ayon_maya/plugins/publish/extract_gltf.py @@ -0,0 +1,65 @@ +import os + +from maya import cmds, mel +import pyblish.api + +from ayon_core.pipeline import publish +from ayon_maya.api import lib +from ayon_maya.api.gltf import extract_gltf + + +class ExtractGLB(publish.Extractor): + + order = pyblish.api.ExtractorOrder + hosts = ["maya"] + label = "Extract GLB" + families = ["gltf"] + + def process(self, instance): + staging_dir = self.staging_dir(instance) + filename = "{0}.glb".format(instance.name) + path = os.path.join(staging_dir, filename) + + cmds.loadPlugin("maya2glTF", quiet=True) + + nodes = instance[:] + + start_frame = instance.data('frameStart') or \ + int(cmds.playbackOptions(query=True, + animationStartTime=True))# noqa + end_frame = instance.data('frameEnd') or \ + int(cmds.playbackOptions(query=True, + animationEndTime=True)) # noqa + fps = mel.eval('currentTimeUnitToFPS()') + + options = { + "sno": True, # selectedNodeOnly + "nbu": True, # .bin instead of .bin0 + "ast": start_frame, + "aet": end_frame, + "afr": fps, + "dsa": 1, + "acn": instance.name, # codespell:ignore acn + "glb": True, + "vno": True # visibleNodeOnly + } + + self.log.debug("Extracting GLB to: {}".format(path)) + with lib.maintained_selection(): + cmds.select(nodes, hi=True, noExpand=True) + extract_gltf(staging_dir, + instance.name, + **options) + + if "representations" not in instance.data: + instance.data["representations"] = [] + + representation = { + 'name': 'glb', + 'ext': 'glb', + 'files': filename, + "stagingDir": staging_dir, + } + instance.data["representations"].append(representation) + + self.log.debug("Extract GLB successful to: {0}".format(path)) diff --git 
a/client/ayon_maya/plugins/publish/extract_gpu_cache.py b/client/ayon_maya/plugins/publish/extract_gpu_cache.py new file mode 100644 index 00000000..4b293b57 --- /dev/null +++ b/client/ayon_maya/plugins/publish/extract_gpu_cache.py @@ -0,0 +1,69 @@ +import json + +from maya import cmds + +from ayon_core.pipeline import publish + + +class ExtractGPUCache(publish.Extractor, + publish.OptionalPyblishPluginMixin): + """Extract the content of the instance to a GPU cache file.""" + + label = "GPU Cache" + hosts = ["maya"] + families = ["model", "animation", "pointcache"] + step = 1.0 + stepSave = 1 + optimize = True + optimizationThreshold = 40000 + optimizeAnimationsForMotionBlur = True + writeMaterials = True + useBaseTessellation = True + + def process(self, instance): + if not self.is_active(instance.data): + return + + cmds.loadPlugin("gpuCache", quiet=True) + + staging_dir = self.staging_dir(instance) + filename = "{}_gpu_cache".format(instance.name) + + # Write out GPU cache file. + kwargs = { + "directory": staging_dir, + "fileName": filename, + "saveMultipleFiles": False, + "simulationRate": self.step, + "sampleMultiplier": self.stepSave, + "optimize": self.optimize, + "optimizationThreshold": self.optimizationThreshold, + "optimizeAnimationsForMotionBlur": ( + self.optimizeAnimationsForMotionBlur + ), + "writeMaterials": self.writeMaterials, + "useBaseTessellation": self.useBaseTessellation + } + self.log.debug( + "Extract {} with:\n{}".format( + instance[:], json.dumps(kwargs, indent=4, sort_keys=True) + ) + ) + cmds.gpuCache(instance[:], **kwargs) + + if "representations" not in instance.data: + instance.data["representations"] = [] + + representation = { + "name": "gpu_cache", + "ext": "abc", + "files": filename + ".abc", + "stagingDir": staging_dir, + "outputName": "gpu_cache" + } + + instance.data["representations"].append(representation) + + self.log.debug( + "Extracted instance {} to: {}".format(instance.name, staging_dir) + ) diff --git a/client/ayon_maya/plugins/publish/extract_import_reference.py b/client/ayon_maya/plugins/publish/extract_import_reference.py new file mode 100644 index 00000000..e461499d --- /dev/null +++ b/client/ayon_maya/plugins/publish/extract_import_reference.py @@ -0,0 +1,159 @@ +import os +import sys + +from maya import cmds + +import pyblish.api +import tempfile + +from ayon_core.lib import run_subprocess +from ayon_core.pipeline import publish +from ayon_core.pipeline.publish import OptionalPyblishPluginMixin +from ayon_maya.api import lib + + +class ExtractImportReference(publish.Extractor, + OptionalPyblishPluginMixin): + """ + + Extract the scene with imported reference. 
+
+    The temp scene with the imported references is
+    published for rendering if this extractor is activated.
+
+    """
+
+    label = "Extract Import Reference"
+    order = pyblish.api.ExtractorOrder - 0.48
+    hosts = ["maya"]
+    families = ["renderlayer", "workfile"]
+    optional = True
+    tmp_format = "_tmp"
+    # fallback scene type when no ext_mapping preset matches
+    scene_type = "ma"
+
+    @classmethod
+    def apply_settings(cls, project_settings):
+        cls.active = project_settings["deadline"]["publish"]["MayaSubmitDeadline"]["import_reference"]  # noqa
+
+    def process(self, instance):
+        if not self.is_active(instance.data):
+            return
+
+        maya_settings = instance.context.data["project_settings"]["maya"]
+        ext_mapping = {
+            item["name"]: item["value"]
+            for item in maya_settings["ext_mapping"]
+        }
+        if ext_mapping:
+            self.log.debug("Looking in settings for scene type ...")
+            # use extension mapping for first family found
+            for family in self.families:
+                try:
+                    self.scene_type = ext_mapping[family]
+                    self.log.debug(
+                        "Using {} as scene type".format(self.scene_type))
+                    break
+
+                except KeyError:
+                    # no preset found, fall back to .ma
+                    self.scene_type = "ma"
+
+        _scene_type = ("mayaAscii"
+                       if self.scene_type == "ma"
+                       else "mayaBinary")
+
+        dir_path = self.staging_dir(instance)
+        if instance.name == "Main":
+            return
+        # name the temp file holding the imported references
+        tmp_name = instance.name + self.tmp_format
+        current_name = cmds.file(query=True, sceneName=True)
+        ref_scene_name = "{0}.{1}".format(tmp_name, self.scene_type)
+
+        reference_path = os.path.join(dir_path, ref_scene_name)
+        tmp_path = os.path.dirname(current_name) + "/" + ref_scene_name
+
+        self.log.debug("Performing extraction..")
+
+        # This generates script for mayapy to take care of reference
+        # importing outside current session. It is passing current scene
+        # name and destination scene name.
+        script = ("""
+# -*- coding: utf-8 -*-
+'''Script to import references to given scene.'''
+import maya.standalone
+maya.standalone.initialize()
+from maya import cmds  # noqa: E402 (requires initialized standalone)
+current_name = "{current_name}"  # scene names filled by caller
+ref_scene_name = "{ref_scene_name}"
+print(">>> Opening {{}} ...".format(current_name))
+cmds.file(current_name, open=True, force=True)
+print(">>> Processing references")
+all_reference = cmds.file(q=True, reference=True) or []
+for ref in all_reference:
+    if cmds.referenceQuery(ref, il=True):
+        cmds.file(ref, importReference=True)
+
+        nested_ref = cmds.file(q=True, reference=True)
+        if nested_ref:
+            for new_ref in nested_ref:
+                if new_ref not in all_reference:
+                    all_reference.append(new_ref)
+
+print(">>> Finish importing references")
+print(">>> Saving scene as {{}}".format(ref_scene_name))
+
+cmds.file(rename=ref_scene_name)
+cmds.file(save=True, force=True)
+print("*** Done")
+        """).format(current_name=current_name, ref_scene_name=tmp_path)
+        mayapy_exe = os.path.join(os.getenv("MAYA_LOCATION"), "bin", "mayapy")
+        if sys.platform == "win32":
+            mayapy_exe += ".exe"
+        mayapy_exe = os.path.normpath(mayapy_exe)
+        # can't use NamedTemporaryFile as that can't be opened in another
+        # process until handles are closed by context manager.
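+        # A plain file inside a TemporaryDirectory can be opened by the
+        # mayapy subprocess while this process is still running, and the
+        # directory is still cleaned up when the context manager exits.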
+ with tempfile.TemporaryDirectory() as tmp_dir_name: + tmp_script_path = os.path.join(tmp_dir_name, "import_ref.py") + self.log.debug("Using script file: {}".format(tmp_script_path)) + with open(tmp_script_path, "wt") as tmp: + tmp.write(script) + + try: + run_subprocess([mayapy_exe, tmp_script_path]) + except Exception: + self.log.error("Import reference failed", exc_info=True) + raise + + with lib.maintained_selection(): + cmds.select(all=True, noExpand=True) + cmds.file(reference_path, + force=True, + typ=_scene_type, + exportSelected=True, + channels=True, + constraints=True, + shader=True, + expressions=True, + constructionHistory=True) + + instance.context.data["currentFile"] = tmp_path + + if "files" not in instance.data: + instance.data["files"] = [] + instance.data["files"].append(ref_scene_name) + + if instance.data.get("representations") is None: + instance.data["representations"] = [] + + ref_representation = { + "name": self.scene_type, + "ext": self.scene_type, + "files": ref_scene_name, + "stagingDir": os.path.dirname(current_name), + "outputName": "imported" + } + self.log.debug(ref_representation) + + instance.data["representations"].append(ref_representation) + + self.log.debug("Extracted instance '%s' to : '%s'" % (ref_scene_name, + reference_path)) diff --git a/client/ayon_maya/plugins/publish/extract_layout.py b/client/ayon_maya/plugins/publish/extract_layout.py new file mode 100644 index 00000000..b025a160 --- /dev/null +++ b/client/ayon_maya/plugins/publish/extract_layout.py @@ -0,0 +1,170 @@ +import math +import os +import json + +from maya import cmds +from maya.api import OpenMaya as om +from ayon_api import get_representation_by_id + +from ayon_core.pipeline import publish + + +class ExtractLayout(publish.Extractor): + """Extract a layout.""" + + label = "Extract Layout" + hosts = ["maya"] + families = ["layout"] + project_container = "AVALON_CONTAINERS" + optional = True + + def process(self, instance): + # Define extract output file path + stagingdir = self.staging_dir(instance) + + # Perform extraction + self.log.debug("Performing extraction..") + + if "representations" not in instance.data: + instance.data["representations"] = [] + + json_data = [] + # TODO representation queries can be refactored to be faster + project_name = instance.context.data["projectName"] + + for asset in cmds.sets(str(instance), query=True): + # Find the container + project_container = self.project_container + container_list = cmds.ls(project_container) + if len(container_list) == 0: + self.log.warning("Project container is not found!") + self.log.warning("The asset(s) may not be properly loaded after published") # noqa + continue + + grp_loaded_ass = instance.data.get("groupLoadedAssets", False) + if grp_loaded_ass: + asset_list = cmds.listRelatives(asset, children=True) + # WARNING This does override 'asset' variable from parent loop + # is it correct? 
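+                # e.g. a set member "charA:rig_GRP" yields grp_name "charA",
+                # which is used below to find its loader container
+                # "charA*_CON".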
+ for asset in asset_list: + grp_name = asset.split(':')[0] + else: + grp_name = asset.split(':')[0] + containers = cmds.ls("{}*_CON".format(grp_name)) + if len(containers) == 0: + self.log.warning("{} isn't from the loader".format(asset)) + self.log.warning("It may not be properly loaded after published") # noqa + continue + container = containers[0] + + representation_id = cmds.getAttr( + "{}.representation".format(container)) + + representation = get_representation_by_id( + project_name, + representation_id, + fields={"versionId", "context"} + ) + + self.log.debug(representation) + + version_id = representation["versionId"] + # TODO use product entity to get product type rather than + # data in representation 'context' + repre_context = representation["context"] + product_type = repre_context.get("product", {}).get("type") + if not product_type: + product_type = repre_context.get("family") + + json_element = { + "product_type": product_type, + "instance_name": cmds.getAttr( + "{}.namespace".format(container)), + "representation": str(representation_id), + "version": str(version_id) + } + + loc = cmds.xform(asset, query=True, translation=True) + rot = cmds.xform(asset, query=True, rotation=True, euler=True) + scl = cmds.xform(asset, query=True, relative=True, scale=True) + + json_element["transform"] = { + "translation": { + "x": loc[0], + "y": loc[1], + "z": loc[2] + }, + "rotation": { + "x": math.radians(rot[0]), + "y": math.radians(rot[1]), + "z": math.radians(rot[2]) + }, + "scale": { + "x": scl[0], + "y": scl[1], + "z": scl[2] + } + } + + row_length = 4 + t_matrix_list = cmds.xform(asset, query=True, matrix=True) + + transform_mm = om.MMatrix(t_matrix_list) + transform = om.MTransformationMatrix(transform_mm) + + t = transform.translation(om.MSpace.kWorld) + t = om.MVector(t.x, t.z, -t.y) + transform.setTranslation(t, om.MSpace.kWorld) + transform.rotateBy( + om.MEulerRotation(math.radians(-90), 0, 0), om.MSpace.kWorld) + transform.scaleBy([1.0, 1.0, -1.0], om.MSpace.kObject) + + t_matrix_list = list(transform.asMatrix()) + + t_matrix = [] + for i in range(0, len(t_matrix_list), row_length): + t_matrix.append(t_matrix_list[i:i + row_length]) + + json_element["transform_matrix"] = [ + list(row) + for row in t_matrix + ] + + basis_list = [ + 1, 0, 0, 0, + 0, 1, 0, 0, + 0, 0, -1, 0, + 0, 0, 0, 1 + ] + + basis_mm = om.MMatrix(basis_list) + basis = om.MTransformationMatrix(basis_mm) + + b_matrix_list = list(basis.asMatrix()) + b_matrix = [] + + for i in range(0, len(b_matrix_list), row_length): + b_matrix.append(b_matrix_list[i:i + row_length]) + + json_element["basis"] = [] + for row in b_matrix: + json_element["basis"].append(list(row)) + + json_data.append(json_element) + + json_filename = "{}.json".format(instance.name) + json_path = os.path.join(stagingdir, json_filename) + + with open(json_path, "w+") as file: + json.dump(json_data, fp=file, indent=2) + + json_representation = { + 'name': 'json', + 'ext': 'json', + 'files': json_filename, + "stagingDir": stagingdir, + } + instance.data["representations"].append(json_representation) + + self.log.debug("Extracted instance '%s' to: %s", + instance.name, json_representation) diff --git a/client/ayon_maya/plugins/publish/extract_look.py b/client/ayon_maya/plugins/publish/extract_look.py new file mode 100644 index 00000000..f6b5e7d5 --- /dev/null +++ b/client/ayon_maya/plugins/publish/extract_look.py @@ -0,0 +1,891 @@ +# -*- coding: utf-8 -*- +"""Maya look extractor.""" +import os +import sys +import contextlib +import json +import 
logging +import tempfile +import platform +from abc import ABCMeta, abstractmethod +from collections import OrderedDict + +import six +import attr +import pyblish.api + +from maya import cmds # noqa + +from ayon_core.lib import ( + find_executable, + source_hash, + run_subprocess, + get_oiio_tool_args, + ToolNotFoundError, +) + +from ayon_core.pipeline import publish, KnownPublishError +from ayon_maya.api import lib + +# Modes for transfer +COPY = 1 +HARDLINK = 2 + + +@attr.s +class TextureResult(object): + """The resulting texture of a processed file for a resource""" + # Path to the file + path = attr.ib() + # Colorspace of the resulting texture. This might not be the input + # colorspace of the texture if a TextureProcessor has processed the file. + colorspace = attr.ib() + # Hash generated for the texture using ayon_core.lib.source_hash + file_hash = attr.ib() + # The transfer mode, e.g. COPY or HARDLINK + transfer_mode = attr.ib() + + +def find_paths_by_hash(texture_hash): + """Find the texture hash key in the dictionary. + + All paths that originate from it. + + Args: + texture_hash (str): Hash of the texture. + + Return: + str: path to texture if found. + + """ + raise KnownPublishError( + "This is a bug. \"find_paths_by_hash\" is not compatible with AYON." + ) + + +@contextlib.contextmanager +def no_workspace_dir(): + """Force maya to a fake temporary workspace directory. + + Note: This is not maya.cmds.workspace 'rootDirectory' but the 'directory' + + This helps to avoid Maya automatically remapping image paths to files + relative to the currently set directory. + + """ + + # Store current workspace + original = cmds.workspace(query=True, directory=True) + + # Set a fake workspace + fake_workspace_dir = tempfile.mkdtemp() + cmds.workspace(directory=fake_workspace_dir) + + try: + yield + finally: + try: + cmds.workspace(directory=original) + except RuntimeError: + # If the original workspace directory didn't exist either + # ignore the fact that it fails to reset it to the old path + pass + + # Remove the temporary directory + os.rmdir(fake_workspace_dir) + + +@six.add_metaclass(ABCMeta) +class TextureProcessor: + + extension = None + + def __init__(self, log=None): + if log is None: + log = logging.getLogger(self.__class__.__name__) + self.log = log + + def apply_settings(self, project_settings): + """Apply AYON system/project settings to the TextureProcessor + + Args: + project_settings (dict): AYON project settings + + Returns: + None + + """ + pass + + @abstractmethod + def process(self, + source, + colorspace, + color_management, + staging_dir): + """Process the `source` texture. + + Must be implemented on inherited class. + + This must always return a TextureResult even when it does not generate + a texture. If it doesn't generate a texture then it should return a + TextureResult using the input path and colorspace. + + Args: + source (str): Path to source file. + colorspace (str): Colorspace of the source file. + color_management (dict): Maya Color management data from + `lib.get_color_management_preferences` + staging_dir (str): Output directory to write to. + + Returns: + TextureResult: The resulting texture information. 
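+
+        Example:
+            >>> # A pass-through result when no conversion happens
+            >>> # (names shown are this module's own):
+            >>> TextureResult(path=source,
+            ...               colorspace=colorspace,
+            ...               file_hash=source_hash(source),
+            ...               transfer_mode=COPY)  # doctest: +SKIP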
+ + """ + pass + + def __repr__(self): + # Log instance as class name + return self.__class__.__name__ + + +class MakeRSTexBin(TextureProcessor): + """Make `.rstexbin` using `redshiftTextureProcessor`""" + + extension = ".rstexbin" + + def process(self, + source, + colorspace, + color_management, + staging_dir): + + texture_processor_path = self.get_redshift_tool( + "redshiftTextureProcessor" + ) + if not texture_processor_path: + raise KnownPublishError("Must have Redshift available.") + + subprocess_args = [ + texture_processor_path, + source + ] + + # if color management is enabled we pass color space information + if color_management["enabled"]: + config_path = color_management["config"] + if not os.path.exists(config_path): + raise RuntimeError("OCIO config not found at: " + "{}".format(config_path)) + + if not os.getenv("OCIO"): + self.log.debug( + "OCIO environment variable not set." + "Setting it with OCIO config from Maya." + ) + os.environ["OCIO"] = config_path + + self.log.debug("converting colorspace {0} to redshift render " + "colorspace".format(colorspace)) + subprocess_args.extend(["-cs", colorspace]) + + hash_args = ["rstex"] + texture_hash = source_hash(source, *hash_args) + + # Redshift stores the output texture next to the input but with + # the extension replaced to `.rstexbin` + basename, ext = os.path.splitext(source) + destination = "{}{}".format(basename, self.extension) + + self.log.debug(" ".join(subprocess_args)) + try: + run_subprocess(subprocess_args, logger=self.log) + except Exception: + self.log.error("Texture .rstexbin conversion failed", + exc_info=True) + six.reraise(*sys.exc_info()) + + return TextureResult( + path=destination, + file_hash=texture_hash, + colorspace=colorspace, + transfer_mode=COPY + ) + + @staticmethod + def get_redshift_tool(tool_name): + """Path to redshift texture processor. + + On Windows it adds .exe extension if missing from tool argument. + + Args: + tool_name (string): Tool name. + + Returns: + str: Full path to redshift texture processor executable. + """ + if "REDSHIFT_COREDATAPATH" not in os.environ: + raise RuntimeError("Must have Redshift available.") + + redshift_tool_path = os.path.join( + os.environ["REDSHIFT_COREDATAPATH"], + "bin", + tool_name + ) + + return find_executable(redshift_tool_path) + + +class MakeTX(TextureProcessor): + """Make `.tx` using `maketx` with some default settings. + + Some hardcoded arguments passed to `maketx` are based on the defaults used + in Arnold's txManager tool. + + """ + + extension = ".tx" + + def __init__(self, log=None): + super(MakeTX, self).__init__(log=log) + self.extra_args = [] + + def apply_settings(self, project_settings): + # Allow extra maketx arguments from project settings + args_settings = ( + project_settings["maya"]["publish"] + .get("ExtractLook", {}).get("maketx_arguments", []) + ) + extra_args = [] + for arg_data in args_settings: + argument = arg_data["argument"] + parameters = arg_data["parameters"] + if not argument: + self.log.debug("Ignoring empty parameter from " + "`maketx_arguments` setting..") + continue + + extra_args.append(argument) + extra_args.extend(parameters) + + self.extra_args = extra_args + + def process(self, + source, + colorspace, + color_management, + staging_dir): + """Process the texture. + + This function requires the `maketx` executable to be available in an + OpenImageIO toolset detectable by AYON. + + Args: + source (str): Path to source file. + colorspace (str): Colorspace of the source file. 
+ color_management (dict): Maya Color management data from + `lib.get_color_management_preferences` + staging_dir (str): Output directory to write to. + + Returns: + TextureResult: The resulting texture information. + + """ + + try: + maketx_args = get_oiio_tool_args("maketx") + except ToolNotFoundError: + raise KnownPublishError( + "OpenImageIO is not available on the machine") + + # Define .tx filepath in staging if source file is not .tx + fname, ext = os.path.splitext(os.path.basename(source)) + if ext == ".tx": + # Do nothing if the source file is already a .tx file. + return TextureResult( + path=source, + file_hash=source_hash(source), + colorspace=colorspace, + transfer_mode=COPY + ) + + # Hardcoded default arguments for maketx conversion based on Arnold's + # txManager in Maya + args = [ + # unpremultiply before conversion (recommended when alpha present) + "--unpremult", + # use oiio-optimized settings for tile-size, planarconfig, metadata + "--oiio", + "--filter", "lanczos3", + ] + if color_management["enabled"]: + config_path = color_management["config"] + if not os.path.exists(config_path): + raise RuntimeError("OCIO config not found at: " + "{}".format(config_path)) + + render_colorspace = color_management["rendering_space"] + + self.log.debug("tx: converting colorspace {0} " + "-> {1}".format(colorspace, + render_colorspace)) + args.extend(["--colorconvert", colorspace, render_colorspace]) + args.extend(["--colorconfig", config_path]) + + else: + # Maya Color management is disabled. We cannot rely on an OCIO + self.log.debug("tx: Maya color management is disabled. No color " + "conversion will be applied to .tx conversion for: " + "{}".format(source)) + # Assume linear + render_colorspace = "linear" + + # Note: The texture hash is only reliable if we include any potential + # conversion arguments provide to e.g. `maketx` + hash_args = ["maketx"] + args + self.extra_args + texture_hash = source_hash(source, *hash_args) + + # Ensure folder exists + resources_dir = os.path.join(staging_dir, "resources") + if not os.path.exists(resources_dir): + os.makedirs(resources_dir) + + self.log.debug("Generating .tx file for %s .." % source) + + subprocess_args = maketx_args + [ + "-v", # verbose + "-u", # update mode + # --checknan doesn't influence the output file but aborts the + # conversion if it finds any. So we can avoid it for the file hash + "--checknan", + source + ] + + subprocess_args.extend(args) + if self.extra_args: + subprocess_args.extend(self.extra_args) + + # Add source hash attribute after other arguments for log readability + # Note: argument is excluded from the hash since it is the hash itself + subprocess_args.extend([ + "--sattrib", + "sourceHash", + texture_hash + ]) + + destination = os.path.join(resources_dir, fname + ".tx") + subprocess_args.extend(["-o", destination]) + + # We want to make sure we are explicit about what OCIO config gets + # used. So when we supply no --colorconfig flag that no fallback to + # an OCIO env var occurs. 
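+        # The assembled command ends up roughly as:
+        #   maketx -v -u --checknan <source> --unpremult --oiio
+        #       --filter lanczos3 [--colorconvert <in> <render>
+        #       --colorconfig <ocio>] --sattrib sourceHash <hash>
+        #       -o <resources>/<name>.tx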
+ env = os.environ.copy() + env.pop("OCIO", None) + + self.log.debug(" ".join(subprocess_args)) + try: + run_subprocess(subprocess_args, env=env) + except Exception: + self.log.error("Texture maketx conversion failed", + exc_info=True) + raise + + return TextureResult( + path=destination, + file_hash=texture_hash, + colorspace=render_colorspace, + transfer_mode=COPY + ) + + @staticmethod + def _has_arnold(): + """Return whether the arnold package is available and importable.""" + try: + import arnold # noqa: F401 + return True + except (ImportError, ModuleNotFoundError): + return False + + +class ExtractLook(publish.Extractor): + """Extract Look (Maya Scene + JSON) + + Only extracts the sets (shadingEngines and alike) alongside a .json file + that stores it relationships for the sets and "attribute" data for the + instance members. + + """ + + label = "Extract Look (Maya Scene + JSON)" + hosts = ["maya"] + families = ["look", "mvLook"] + order = pyblish.api.ExtractorOrder + 0.2 + scene_type = "ma" + look_data_type = "json" + + def get_maya_scene_type(self, instance): + """Get Maya scene type from settings. + + Args: + instance (pyblish.api.Instance): Instance with collected + project settings. + + """ + maya_settings = instance.context.data["project_settings"]["maya"] + ext_mapping = { + item["name"]: item["value"] + for item in maya_settings["ext_mapping"] + } + if ext_mapping: + self.log.debug("Looking in settings for scene type ...") + # use extension mapping for first family found + for family in self.families: + try: + self.scene_type = ext_mapping[family] + self.log.debug( + "Using {} as scene type".format(self.scene_type)) + break + except KeyError: + # no preset found + pass + + return "mayaAscii" if self.scene_type == "ma" else "mayaBinary" + + def process(self, instance): + """Plugin entry point. + + Args: + instance: Instance to process. 
+ + """ + _scene_type = self.get_maya_scene_type(instance) + + # Define extract output file path + dir_path = self.staging_dir(instance) + maya_fname = "{0}.{1}".format(instance.name, self.scene_type) + json_fname = "{0}.{1}".format(instance.name, self.look_data_type) + maya_path = os.path.join(dir_path, maya_fname) + json_path = os.path.join(dir_path, json_fname) + + # Remove all members of the sets so they are not included in the + # exported file by accident + self.log.debug("Processing sets..") + lookdata = instance.data["lookData"] + relationships = lookdata["relationships"] + sets = list(relationships.keys()) + if not sets: + self.log.debug("No sets found for the look") + return + + # Specify texture processing executables to activate + # TODO: Load these more dynamically once we support more processors + processors = [] + context = instance.context + for key, Processor in { + # Instance data key to texture processor mapping + "maketx": MakeTX, + "rstex": MakeRSTexBin + }.items(): + if instance.data.get(key, False): + processor = Processor(log=self.log) + processor.apply_settings(context.data["project_settings"]) + processors.append(processor) + + if processors: + self.log.debug("Collected texture processors: " + "{}".format(processors)) + + self.log.debug("Processing resources..") + results = self.process_resources(instance, + staging_dir=dir_path, + processors=processors) + transfers = results["fileTransfers"] + hardlinks = results["fileHardlinks"] + hashes = results["fileHashes"] + remap = results["attrRemap"] + + # Extract in correct render layer + self.log.debug("Extracting look maya scene file: {}".format(maya_path)) + layer = instance.data.get("renderlayer", "defaultRenderLayer") + with lib.renderlayer(layer): + # TODO: Ensure membership edits don't become renderlayer overrides + with lib.empty_sets(sets, force=True): + # To avoid Maya trying to automatically remap the file + # textures relative to the `workspace -directory` we force + # it to a fake temporary workspace. This fixes textures + # getting incorrectly remapped. 
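As an aside, `no_workspace_dir` used below is defined elsewhere in this module; a minimal sketch of what such a context manager could look like (the helper name and implementation here are assumptions, not the actual code):

    import contextlib
    import tempfile

    from maya import cmds

    @contextlib.contextmanager
    def _temporary_workspace():
        # Hypothetical stand-in for `no_workspace_dir`: point the Maya
        # workspace at an empty temporary directory during the export so
        # `cmds.file` cannot remap texture paths against the real project.
        original = cmds.workspace(query=True, rootDirectory=True)
        temp_dir = tempfile.mkdtemp()
        try:
            cmds.workspace(temp_dir, openWorkspace=True)
            yield temp_dir
        finally:
            cmds.workspace(original, openWorkspace=True)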
+                with no_workspace_dir():
+                    with lib.attribute_values(remap):
+                        with lib.maintained_selection():
+                            cmds.select(sets, noExpand=True)
+                            cmds.file(
+                                maya_path,
+                                force=True,
+                                typ=_scene_type,
+                                exportSelected=True,
+                                preserveReferences=False,
+                                channels=True,
+                                constraints=True,
+                                expressions=True,
+                                constructionHistory=True,
+                            )
+
+        # Write the JSON data
+        data = {
+            "attributes": lookdata["attributes"],
+            "relationships": relationships
+        }
+
+        self.log.debug("Extracting json file: {}".format(json_path))
+        with open(json_path, "w") as f:
+            json.dump(data, f)
+
+        if "files" not in instance.data:
+            instance.data["files"] = []
+        if "hardlinks" not in instance.data:
+            instance.data["hardlinks"] = []
+        if "transfers" not in instance.data:
+            instance.data["transfers"] = []
+
+        instance.data["files"].append(maya_fname)
+        instance.data["files"].append(json_fname)
+
+        if instance.data.get("representations") is None:
+            instance.data["representations"] = []
+
+        instance.data["representations"].append(
+            {
+                "name": self.scene_type,
+                "ext": self.scene_type,
+                "files": os.path.basename(maya_fname),
+                "stagingDir": os.path.dirname(maya_fname),
+            }
+        )
+        instance.data["representations"].append(
+            {
+                "name": self.look_data_type,
+                "ext": self.look_data_type,
+                "files": os.path.basename(json_fname),
+                "stagingDir": os.path.dirname(json_fname),
+            }
+        )
+
+        # Set up the resources transfers/links for the integrator
+        instance.data["transfers"].extend(transfers)
+        instance.data["hardlinks"].extend(hardlinks)
+
+        # Source hash for the textures
+        instance.data["sourceHashes"] = hashes
+
+        self.log.debug("Extracted instance '%s' to: %s" % (instance.name,
+                                                           maya_path))
+
+    def _set_resource_result_colorspace(self, resource, colorspace):
+        """Update resource resulting colorspace after texture processing"""
+        if "result_color_space" in resource:
+            if resource["result_color_space"] == colorspace:
+                return
+
+            self.log.warning(
+                "Resource already has a resulting colorspace but is now "
+                "being overridden to a new one: {} -> {}".format(
+                    resource["result_color_space"], colorspace
+                )
+            )
+        resource["result_color_space"] = colorspace
+
+    def process_resources(self, instance, staging_dir, processors):
+        """Process all resources in the instance.
+
+        It is assumed that all resources are nodes using file textures.
+
+        Extract the textures to transfer, possibly convert with maketx and
+        remap the node paths to the destination path. Note that a source
+        might be included more than once amongst the resources as they could
+        be the input file to multiple nodes.
+
+        """
+
+        resources = instance.data["resources"]
+        color_management = lib.get_color_management_preferences()
+
+        # TODO: Temporarily disable all hardlinking, because the feature is
+        #       not being used or working properly.
+        self.log.info(
+            "Forcing copy instead of hardlink."
+        )
+        force_copy = True
+
+        if not force_copy and platform.system().lower() == "windows":
+            # Temporary fix to NOT create hardlinks on windows machines
+            self.log.warning(
+                "Forcing copy instead of hardlink due to issues on Windows..."
+            )
+            force_copy = True
+
+        destinations_cache = {}
+
+        def get_resource_destination_cached(path):
+            """Get resource destination with cached result per filepath"""
+            if path not in destinations_cache:
+                destination = self.get_resource_destination(
+                    path, instance.data["resourcesDir"], processors)
+                destinations_cache[path] = destination
+            return destinations_cache[path]
+
+        # Process all resource's individual files
+        processed_files = {}
+        transfers = []
+        hardlinks = []
+        hashes = {}
+        remap = OrderedDict()
+        for resource in resources:
+            colorspace = resource["color_space"]
+
+            for filepath in resource["files"]:
+                filepath = os.path.normpath(filepath)
+
+                if filepath in processed_files:
+                    # The file was already processed, likely due to usage by
+                    # another resource in the scene. We confirm here that it
+                    # was not processed with a colorspace different from the
+                    # current resource's colorspace.
+                    processed_file = processed_files[filepath]
+                    self.log.debug(
+                        "File was already processed. Likely used by another "
+                        "resource too: {}".format(filepath)
+                    )
+
+                    if colorspace != processed_file["color_space"]:
+                        self.log.warning(
+                            "File '{}' was already processed using colorspace "
+                            "'{}' instead of the current resource's "
+                            "colorspace '{}'. The already processed texture "
+                            "result's colorspace '{}' will be used."
+                            "".format(filepath,
+                                      processed_file["color_space"],
+                                      colorspace,
+                                      processed_file["result_color_space"]))
+
+                    self._set_resource_result_colorspace(
+                        resource,
+                        colorspace=processed_file["result_color_space"]
+                    )
+                    continue
+
+                texture_result = self._process_texture(
+                    filepath,
+                    processors=processors,
+                    staging_dir=staging_dir,
+                    force_copy=force_copy,
+                    color_management=color_management,
+                    colorspace=colorspace
+                )
+
+                # Set the resulting color space on the resource
+                self._set_resource_result_colorspace(
+                    resource, colorspace=texture_result.colorspace
+                )
+
+                processed_files[filepath] = {
+                    "color_space": colorspace,
+                    "result_color_space": texture_result.colorspace,
+                }
+
+                source = texture_result.path
+                destination = get_resource_destination_cached(source)
+                if force_copy or texture_result.transfer_mode == COPY:
+                    transfers.append((source, destination))
+                    self.log.debug('file will be copied {} -> {}'.format(
+                        source, destination))
+                elif texture_result.transfer_mode == HARDLINK:
+                    hardlinks.append((source, destination))
+                    self.log.debug('file will be hardlinked {} -> {}'.format(
+                        source, destination))
+
+                # Store the hashes from hash to destination to include in the
+                # database
+                hashes[texture_result.file_hash] = destination
+
+                # Set up remapping attributes for the node during the publish
+                # The order of these can be important if one attribute directly
+                # affects another, e.g. we set colorspace after filepath because
+                # maya sometimes tries to guess the colorspace when changing
+                # filepaths (which is avoidable, but we don't want to have those
+                # attributes changed in the resulting publish)
+                # Remap filepath to publish destination
+                # TODO It would be much better if we could use the destination path
+                # from the actual processed texture results, but since the
+                # attribute will need to preserve tokens like , etc for
+                # now we will define the output path from the attribute value
+                # including the tokens to persist them.
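To make the ordering requirement above concrete, a small sketch with hypothetical node and path values: the file path entry must precede the colorspace entry so that whatever colorspace Maya may guess on the path change is overwritten afterwards.

    from collections import OrderedDict

    remap_example = OrderedDict()
    # 1. remap the texture path first ...
    remap_example["file1.fileTextureName"] = "/publish/resources/wall.tx"
    # 2. ... then force the colorspace, undoing any automatic guess
    remap_example["file1.colorSpace"] = "ACES - ACEScg"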
+                filepath_attr = resource["attribute"]
+                remap[filepath_attr] = get_resource_destination_cached(
+                    resource["source"]
+                )
+
+                # Preserve color space values (force value after filepath change)
+                # This will also trigger in the same order at end of context to
+                # ensure after context it's still the original value.
+                node = resource["node"]
+                if cmds.attributeQuery("colorSpace", node=node, exists=True):
+                    color_space_attr = "{}.colorSpace".format(node)
+                    remap[color_space_attr] = resource["result_color_space"]
+
+        self.log.debug("Finished remapping destinations ...")
+
+        return {
+            "fileTransfers": transfers,
+            "fileHardlinks": hardlinks,
+            "fileHashes": hashes,
+            "attrRemap": remap,
+        }
+
+    def get_resource_destination(self, filepath, resources_dir, processors):
+        """Get resource destination path.
+
+        This is a utility function to change the path if the resource file
+        name is changed by some external tool like `maketx`.
+
+        Args:
+            filepath (str): Resource source path
+            resources_dir (str): Destination dir for resources in publish.
+            processors (list): Texture processors converting resource.
+
+        Returns:
+            str: Path to resource file
+
+        """
+        # Compute destination location
+        basename, ext = os.path.splitext(os.path.basename(filepath))
+
+        # Get extension from the last processor
+        for processor in reversed(processors):
+            processor_ext = processor.extension
+            if processor_ext and ext != processor_ext:
+                self.log.debug("Processor {} overrides extension to '{}' "
+                               "for path: {}".format(processor,
+                                                     processor_ext,
+                                                     filepath))
+                ext = processor_ext
+                break
+
+        return os.path.join(
+            resources_dir, basename + ext
+        )
+
+    def _get_existing_hashed_texture(self, texture_hash):
+        """Return the first found filepath from a texture hash"""
+
+        # If source has been published before with the same settings,
+        # then don't reprocess but hardlink from the original
+        existing = find_paths_by_hash(texture_hash)
+        if existing:
+            source = next((p for p in existing if os.path.exists(p)), None)
+            if source:
+                return source
+            else:
+                self.log.warning(
+                    "Paths not found on disk, "
+                    "skipping hardlink: {}".format(existing)
+                )
+
+    def _process_texture(self,
+                         filepath,
+                         processors,
+                         staging_dir,
+                         force_copy,
+                         color_management,
+                         colorspace):
+        """Process a single texture file on disk for publishing.
+
+        This will:
+        1. Check whether it's already published; if so it will hardlink
+           (if the texture hash is found and force copy is not enabled)
+        2. It will process the texture using the supplied texture
+           processors like MakeTX and MakeRSTexBin if enabled.
+        3. Compute the destination path for the source file.
+
+        Args:
+            filepath (str): The source file path to process.
+            processors (list): List of TextureProcessor processing the texture
+            staging_dir (str): The staging directory to write to.
+            force_copy (bool): Whether to force a copy even if a file hash
+                might have existed already in the project, otherwise
+                hardlinking the existing file is allowed.
+            color_management (dict): Maya's Color Management settings from
+                `lib.get_color_management_preferences`
+            colorspace (str): The source colorspace of the resources this
+                texture belongs to.
+
+        Returns:
+            TextureResult: The texture result information.
+        """
+
+        if len(processors) > 1:
+            raise KnownPublishError(
+                "Using more than one texture processor is not supported. "
" + "Current processors enabled: {}".format(processors) + ) + + for processor in processors: + self.log.debug("Processing texture {} with processor {}".format( + filepath, processor + )) + + processed_result = processor.process(filepath, + colorspace, + color_management, + staging_dir) + if not processed_result: + raise RuntimeError("Texture Processor {} returned " + "no result.".format(processor)) + self.log.debug("Generated processed " + "texture: {}".format(processed_result.path)) + + # TODO: Currently all processors force copy instead of allowing + # hardlinks using source hashes. This should be refactored + return processed_result + + # No texture processing for this file + texture_hash = source_hash(filepath) + if not force_copy: + existing = self._get_existing_hashed_texture(filepath) + if existing: + self.log.debug("Found hash in database, preparing hardlink..") + return TextureResult( + path=filepath, + file_hash=texture_hash, + colorspace=colorspace, + transfer_mode=HARDLINK + ) + + return TextureResult( + path=filepath, + file_hash=texture_hash, + colorspace=colorspace, + transfer_mode=COPY + ) + + +class ExtractModelRenderSets(ExtractLook): + """Extract model render attribute sets as model metadata + + Only extracts the render attrib sets (NO shadingEngines) alongside + a .json file that stores it relationships for the sets and "attribute" + data for the instance members. + + """ + + label = "Model Render Sets" + hosts = ["maya"] + families = ["model"] + scene_type_prefix = "meta.render." + look_data_type = "meta.render.json" + + def get_maya_scene_type(self, instance): + typ = super(ExtractModelRenderSets, self).get_maya_scene_type(instance) + # add prefix + self.scene_type = self.scene_type_prefix + self.scene_type + + return typ diff --git a/client/ayon_maya/plugins/publish/extract_maya_scene_raw.py b/client/ayon_maya/plugins/publish/extract_maya_scene_raw.py new file mode 100644 index 00000000..bc0a9afd --- /dev/null +++ b/client/ayon_maya/plugins/publish/extract_maya_scene_raw.py @@ -0,0 +1,152 @@ +# -*- coding: utf-8 -*- +"""Extract data as Maya scene (raw).""" +import os + +from maya import cmds + +from ayon_maya.api.lib import maintained_selection +from ayon_core.pipeline import ( + AYON_CONTAINER_ID, + AVALON_CONTAINER_ID, + publish, +) +from ayon_core.pipeline.publish import AYONPyblishPluginMixin +from ayon_core.lib import BoolDef + + +class ExtractMayaSceneRaw(publish.Extractor, AYONPyblishPluginMixin): + """Extract as Maya Scene (raw). + + This will preserve all references, construction history, etc. + """ + + label = "Maya Scene (Raw)" + hosts = ["maya"] + families = ["mayaAscii", + "mayaScene", + "setdress", + "layout", + "camerarig"] + scene_type = "ma" + + @classmethod + def get_attribute_defs(cls): + return [ + BoolDef( + "preserve_references", + label="Preserve References", + tooltip=( + "When enabled references will still be references " + "in the published file.\nWhen disabled the references " + "are imported into the published file generating a " + "file without references." 
+                ),
+                default=True
+            )
+        ]
+
+    def process(self, instance):
+        """Plugin entry point."""
+        maya_settings = instance.context.data["project_settings"]["maya"]
+        ext_mapping = {
+            item["name"]: item["value"]
+            for item in maya_settings["ext_mapping"]
+        }
+        if ext_mapping:
+            self.log.debug("Looking in settings for scene type ...")
+            # use extension mapping for first family found
+            for family in self.families:
+                try:
+                    self.scene_type = ext_mapping[family]
+                    self.log.debug(
+                        "Using {} as scene type".format(self.scene_type))
+                    break
+                except KeyError:
+                    # no preset found
+                    pass
+        # Define extract output file path
+        dir_path = self.staging_dir(instance)
+        filename = "{0}.{1}".format(instance.name, self.scene_type)
+        path = os.path.join(dir_path, filename)
+
+        # Whether to include all nodes in the instance (including those from
+        # history) or only use the exact set members
+        members_only = instance.data.get("exactSetMembersOnly", False)
+        if members_only:
+            members = instance.data.get("setMembers", list())
+            if not members:
+                raise RuntimeError("Can't export 'exact set members only' "
+                                   "when set is empty.")
+        else:
+            members = instance[:]
+
+        # Copy the list so extending the selection does not mutate `members`
+        selection = list(members)
+        if set(self.add_for_families).intersection(
+                set(instance.data.get("families", []))) or \
+                instance.data.get("productType") in self.add_for_families:
+            selection += self._get_loaded_containers(members)
+
+        # Perform extraction
+        self.log.debug("Performing extraction ...")
+        attribute_values = self.get_attr_values_from_data(
+            instance.data
+        )
+        with maintained_selection():
+            cmds.select(selection, noExpand=True)
+            cmds.file(path,
+                      force=True,
+                      typ="mayaAscii" if self.scene_type == "ma" else "mayaBinary",  # noqa: E501
+                      exportSelected=True,
+                      preserveReferences=attribute_values[
+                          "preserve_references"
+                      ],
+                      constructionHistory=True,
+                      shader=True,
+                      constraints=True,
+                      expressions=True)
+
+        if "representations" not in instance.data:
+            instance.data["representations"] = []
+
+        representation = {
+            'name': self.scene_type,
+            'ext': self.scene_type,
+            'files': filename,
+            "stagingDir": dir_path
+        }
+        instance.data["representations"].append(representation)
+
+        self.log.debug("Extracted instance '%s' to: %s" % (instance.name,
+                                                           path))
+
+    @staticmethod
+    def _get_loaded_containers(members):
+        # type: (list) -> list
+        refs_to_include = {
+            cmds.referenceQuery(node, referenceNode=True)
+            for node in members
+            if cmds.referenceQuery(node, isNodeReferenced=True)
+        }
+
+        members_with_refs = refs_to_include.union(members)
+
+        obj_sets = cmds.ls("*.id", long=True, type="objectSet", recursive=True,
+                           objectsOnly=True)
+
+        loaded_containers = []
+        for obj_set in obj_sets:
+
+            if not cmds.attributeQuery("id", node=obj_set, exists=True):
+                continue
+
+            id_attr = "{}.id".format(obj_set)
+            if cmds.getAttr(id_attr) not in {
+                AYON_CONTAINER_ID, AVALON_CONTAINER_ID
+            }:
+                continue
+
+            # `cmds.sets` may return None for an empty set
+            set_content = set(cmds.sets(obj_set, query=True) or [])
+            if set_content.intersection(members_with_refs):
+                loaded_containers.append(obj_set)
+
+        return loaded_containers
diff --git a/client/ayon_maya/plugins/publish/extract_maya_usd.py b/client/ayon_maya/plugins/publish/extract_maya_usd.py
new file mode 100644
index 00000000..b6f6529c
--- /dev/null
+++ b/client/ayon_maya/plugins/publish/extract_maya_usd.py
@@ -0,0 +1,293 @@
+import os
+import six
+import json
+import contextlib
+
+from maya import cmds
+
+import pyblish.api
+from ayon_core.pipeline import publish
+from ayon_maya.api.lib import maintained_selection
+
+
+@contextlib.contextmanager
+def usd_export_attributes(nodes, attrs=None, attr_prefixes=None,
+                          mapping=None):
+    """Define attributes for the given nodes that should be exported.
+
+    MayaUSDExport will export custom attributes if the Maya node has a
+    string attribute `USD_UserExportedAttributesJson` that provides an
+    export mapping for the maya attributes. This context manager will try
+    to autogenerate such an attribute during the export to include attributes
+    for the export.
+
+    Arguments:
+        nodes (List[str]): Nodes to process.
+        attrs (Optional[List[str]]): Full name of attributes to include.
+        attr_prefixes (Optional[List[str]]): Prefixes of attributes to
+            include.
+        mapping (Optional[Dict[Dict]]): A mapping per attribute name for the
+            conversion to a USD attribute, including renaming, defining type,
+            converting attribute precision, etc. This matches the usual
+            `USD_UserExportedAttributesJson` json mapping of `mayaUSDExport`.
+            When no mapping is provided for an attribute it will use `{}` as
+            value.
+
+    Examples:
+        >>> with usd_export_attributes(
+        >>>     ["pCube1"], attrs=["myDoubleAttributeAsFloat"], mapping={
+        >>>         "myDoubleAttributeAsFloat": {
+        >>>             "usdAttrName": "my:namespace:attrib",
+        >>>             "translateMayaDoubleToUsdSinglePrecision": True,
+        >>>         }
+        >>>     }):
+        >>>     pass  # perform the export here
+
+    """
+    # todo: this might be better done with a custom export chaser
+    #  see `chaser` argument for `mayaUSDExport`
+
+    import maya.api.OpenMaya as om
+
+    if not attrs and not attr_prefixes:
+        # context manager does nothing
+        yield
+        return
+
+    if attrs is None:
+        attrs = []
+    if attr_prefixes is None:
+        attr_prefixes = []
+    if mapping is None:
+        mapping = {}
+
+    usd_json_attr = "USD_UserExportedAttributesJson"
+    strings = attrs + ["{}*".format(prefix) for prefix in attr_prefixes]
+    context_state = {}
+    for node in set(nodes):
+        node_attrs = cmds.listAttr(node, st=strings)
+        if not node_attrs:
+            # Nothing to do for this node
+            continue
+
+        node_attr_data = {}
+        for node_attr in set(node_attrs):
+            node_attr_data[node_attr] = mapping.get(node_attr, {})
+
+        if cmds.attributeQuery(usd_json_attr, node=node, exists=True):
+            existing_node_attr_value = cmds.getAttr(
+                "{}.{}".format(node, usd_json_attr)
+            )
+            if existing_node_attr_value and existing_node_attr_value != "{}":
+                # Any existing attribute mappings in an existing
+                # `USD_UserExportedAttributesJson` attribute always take
+                # precedence over what this function tries to imprint
+                existing_node_attr_data = json.loads(existing_node_attr_value)
+                node_attr_data.update(existing_node_attr_data)
+
+        context_state[node] = json.dumps(node_attr_data)
+
+    sel = om.MSelectionList()
+    dg_mod = om.MDGModifier()
+    fn_string = om.MFnStringData()
+    fn_typed = om.MFnTypedAttribute()
+    try:
+        for node, value in context_state.items():
+            data = fn_string.create(value)
+            sel.clear()
+            if cmds.attributeQuery(usd_json_attr, node=node, exists=True):
+                # Set the attribute value
+                sel.add("{}.{}".format(node, usd_json_attr))
+                plug = sel.getPlug(0)
+                dg_mod.newPlugValue(plug, data)
+            else:
+                # Create attribute with the value as default value
+                sel.add(node)
+                node_obj = sel.getDependNode(0)
+                attr_obj = fn_typed.create(usd_json_attr,
+                                           usd_json_attr,
+                                           om.MFnData.kString,
+                                           data)
+                dg_mod.addAttribute(node_obj, attr_obj)
+        dg_mod.doIt()
+        yield
+    finally:
+        dg_mod.undoIt()
+
+
+class ExtractMayaUsd(publish.Extractor):
+    """Extractor for Maya USD Asset data.
+
+    Upon publish a .usd (or .usdz) asset file will typically be written.
+    """
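For context, a condensed sketch (with hypothetical instance data) of the override flow implemented by `options`, `default_options` and `parse_overrides` below: a key is applied only when it is a known option and its value has the expected type.

    # Hypothetical data as it might be collected on the instance:
    instance_data = {"defaultUSDFormat": "usda", "stripNamespaces": "yes"}

    expected = {"defaultUSDFormat": str, "stripNamespaces": bool}
    options = {"defaultUSDFormat": "usdc", "stripNamespaces": False}
    for key, value in instance_data.items():
        if key in expected and isinstance(value, expected[key]):
            options[key] = value
    # "stripNamespaces" stays False: the str value fails the type check.
    assert options == {"defaultUSDFormat": "usda", "stripNamespaces": False}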
+
+    label = "Extract Maya USD Asset"
+    hosts = ["maya"]
+    families = ["mayaUsd"]
+
+    @property
+    def options(self):
+        """Overridable options for Maya USD Export
+
+        Given in the following format
+            - {NAME: EXPECTED TYPE}
+
+        If the overridden option's type does not match,
+        the option is not included and a warning is logged.
+
+        """
+
+        # TODO: Support more `mayaUSDExport` parameters
+        return {
+            "defaultUSDFormat": str,
+            "stripNamespaces": bool,
+            "mergeTransformAndShape": bool,
+            "exportDisplayColor": bool,
+            "exportColorSets": bool,
+            "exportInstances": bool,
+            "exportUVs": bool,
+            "exportVisibility": bool,
+            "exportComponentTags": bool,
+            "exportRefsAsInstanceable": bool,
+            "eulerFilter": bool,
+            "renderableOnly": bool,
+            # Optional list; `type(None)` is required so `isinstance`
+            # also accepts a None value
+            "jobContext": (list, type(None))
+            # "worldspace": bool,
+        }
+
+    @property
+    def default_options(self):
+        """The default options for Maya USD Export."""
+
+        # TODO: Support more `mayaUSDExport` parameters
+        return {
+            "defaultUSDFormat": "usdc",
+            "stripNamespaces": False,
+            "mergeTransformAndShape": False,
+            "exportDisplayColor": False,
+            "exportColorSets": True,
+            "exportInstances": True,
+            "exportUVs": True,
+            "exportVisibility": True,
+            "exportComponentTags": True,
+            "exportRefsAsInstanceable": False,
+            "eulerFilter": True,
+            "renderableOnly": False,
+            "jobContext": None
+            # "worldspace": False
+        }
+
+    def parse_overrides(self, instance, options):
+        """Inspect data of instance to determine overridden options"""
+
+        for key in instance.data:
+            if key not in self.options:
+                continue
+
+            # Ensure the data is of correct type
+            value = instance.data[key]
+            if isinstance(value, six.text_type):
+                value = str(value)
+            if not isinstance(value, self.options[key]):
+                self.log.warning(
+                    "Overridden attribute {key} was of "
+                    "the wrong type: {invalid_type} "
+                    "- should have been {valid_type}".format(
+                        key=key,
+                        invalid_type=type(value).__name__,
+                        # May be a type or a tuple of types, so avoid
+                        # relying on `__name__` here
+                        valid_type=self.options[key]))
+                continue
+
+            options[key] = value
+
+        return options
+
+    def filter_members(self, members):
+        # Can be overridden by inherited classes
+        return members
+
+    def process(self, instance):
+
+        # Load plugin first
+        cmds.loadPlugin("mayaUsdPlugin", quiet=True)
+
+        # Define output file path
+        staging_dir = self.staging_dir(instance)
+        file_name = "{0}.usd".format(instance.name)
+        file_path = os.path.join(staging_dir, file_name)
+        file_path = file_path.replace('\\', '/')
+
+        # Parse export options
+        options = self.default_options
+        options = self.parse_overrides(instance, options)
+        self.log.debug("Export options: {0}".format(options))
+
+        # Perform extraction
+        self.log.debug("Performing extraction ...")
+
+        members = instance.data("setMembers")
+        self.log.debug('Collected objects: {}'.format(members))
+        members = self.filter_members(members)
+        if not members:
+            self.log.error('No members!')
+            return
+
+        start = instance.data["frameStartHandle"]
+        end = instance.data["frameEndHandle"]
+
+        def parse_attr_str(attr_str):
+            result = list()
+            for attr in attr_str.split(","):
+                attr = attr.strip()
+                if not attr:
+                    continue
+                result.append(attr)
+            return result
+
+        attrs = parse_attr_str(instance.data.get("attr", ""))
+        attrs += instance.data.get("userDefinedAttributes", [])
+        attrs += ["cbId"]
+        attr_prefixes = parse_attr_str(instance.data.get("attrPrefix", ""))
+
+        self.log.debug('Exporting USD: {} / {}'.format(file_path, members))
+        with maintained_selection():
+            with usd_export_attributes(instance[:],
+                                       attrs=attrs,
+                                       attr_prefixes=attr_prefixes):
cmds.mayaUSDExport(file=file_path, + frameRange=(start, end), + frameStride=instance.data.get("step", 1.0), + exportRoots=members, + **options) + + representation = { + 'name': "usd", + 'ext': "usd", + 'files': file_name, + 'stagingDir': staging_dir + } + instance.data.setdefault("representations", []).append(representation) + + self.log.debug( + "Extracted instance {} to {}".format(instance.name, file_path) + ) + + +class ExtractMayaUsdAnim(ExtractMayaUsd): + """Extractor for Maya USD Animation Sparse Cache data. + + This will extract the sparse cache data from the scene and generate a + USD file with all the animation data. + + Upon publish a .usd sparse cache will be written. + """ + label = "Extract Maya USD Animation Sparse Cache" + families = ["animation", "mayaUsd"] + match = pyblish.api.Subset + + def filter_members(self, members): + out_set = next((i for i in members if i.endswith("out_SET")), None) + + if out_set is None: + self.log.warning("Expecting out_SET") + return None + + members = cmds.ls(cmds.sets(out_set, query=True), long=True) + return members diff --git a/client/ayon_maya/plugins/publish/extract_model.py b/client/ayon_maya/plugins/publish/extract_model.py new file mode 100644 index 00000000..b439d05a --- /dev/null +++ b/client/ayon_maya/plugins/publish/extract_model.py @@ -0,0 +1,108 @@ +# -*- coding: utf-8 -*- +"""Extract model as Maya Scene.""" +import os + +from maya import cmds + +from ayon_core.pipeline import publish +from ayon_maya.api import lib + + +class ExtractModel(publish.Extractor, + publish.OptionalPyblishPluginMixin): + """Extract as Model (Maya Scene). + + Only extracts contents based on the original "setMembers" data to ensure + publishing the least amount of required shapes. From that it only takes + the shapes that are not intermediateObjects + + During export it sets a temporary context to perform a clean extraction. 
+ The context ensures: + - Smooth preview is turned off for the geometry + - Default shader is assigned (no materials are exported) + - Remove display layers + + """ + + label = "Model (Maya Scene)" + hosts = ["maya"] + families = ["model"] + scene_type = "ma" + optional = True + + def process(self, instance): + """Plugin entry point.""" + if not self.is_active(instance.data): + return + + maya_settings = instance.context.data["project_settings"]["maya"] + ext_mapping = { + item["name"]: item["value"] + for item in maya_settings["ext_mapping"] + } + if ext_mapping: + self.log.debug("Looking in settings for scene type ...") + # use extension mapping for first family found + for family in self.families: + try: + self.scene_type = ext_mapping[family] + self.log.debug( + "Using {} as scene type".format(self.scene_type)) + break + except KeyError: + # no preset found + pass + # Define extract output file path + stagingdir = self.staging_dir(instance) + filename = "{0}.{1}".format(instance.name, self.scene_type) + path = os.path.join(stagingdir, filename) + + # Perform extraction + self.log.debug("Performing extraction ...") + + # Get only the shape contents we need in such a way that we avoid + # taking along intermediateObjects + members = instance.data("setMembers") + members = cmds.ls(members, + dag=True, + shapes=True, + type=("mesh", "nurbsCurve"), + noIntermediate=True, + long=True) + + with lib.no_display_layers(instance): + with lib.displaySmoothness(members, + divisionsU=0, + divisionsV=0, + pointsWire=4, + pointsShaded=1, + polygonObject=1): + with lib.shader(members, + shadingEngine="initialShadingGroup"): + with lib.maintained_selection(): + cmds.select(members, noExpand=True) + cmds.file(path, + force=True, + typ="mayaAscii" if self.scene_type == "ma" else "mayaBinary", # noqa: E501 + exportSelected=True, + preserveReferences=False, + channels=False, + constraints=False, + expressions=False, + constructionHistory=False) + + # Store reference for integration + + if "representations" not in instance.data: + instance.data["representations"] = [] + + representation = { + 'name': self.scene_type, + 'ext': self.scene_type, + 'files': filename, + "stagingDir": stagingdir, + } + instance.data["representations"].append(representation) + + self.log.debug("Extracted instance '%s' to: %s" % (instance.name, + path)) diff --git a/client/ayon_maya/plugins/publish/extract_multiverse_look.py b/client/ayon_maya/plugins/publish/extract_multiverse_look.py new file mode 100644 index 00000000..d69c1797 --- /dev/null +++ b/client/ayon_maya/plugins/publish/extract_multiverse_look.py @@ -0,0 +1,157 @@ +import os + +from maya import cmds + +from ayon_core.pipeline import publish +from ayon_maya.api.lib import maintained_selection + + +class ExtractMultiverseLook(publish.Extractor): + """Extractor for Multiverse USD look data. + + This will extract: + + - the shading networks that are assigned in MEOW as Maya material overrides + to a Multiverse Compound + - settings for a Multiverse Write Override operation. + + Relevant settings are visible in the Maya set node created by a Multiverse + USD Look instance creator. 
+ + The input data contained in the set is: + + - a single Multiverse Compound node with any number of Maya material + overrides (typically set in MEOW) + + Upon publish two files will be written: + + - a .usda override file containing material assignment information + - a .ma file containing shading networks + + Note: when layering the material assignment override on a loaded Compound, + remember to set a matching attribute override with the namespace of + the loaded compound in order for the material assignment to resolve. + """ + + label = "Extract Multiverse USD Look" + hosts = ["maya"] + families = ["mvLook"] + scene_type = "usda" + file_formats = ["usda", "usd"] + + @property + def options(self): + """Overridable options for Multiverse USD Export + + Given in the following format + - {NAME: EXPECTED TYPE} + + If the overridden option's type does not match, + the option is not included and a warning is logged. + + """ + + return { + "writeAll": bool, + "writeTransforms": bool, + "writeVisibility": bool, + "writeAttributes": bool, + "writeMaterials": bool, + "writeVariants": bool, + "writeVariantsDefinition": bool, + "writeActiveState": bool, + "writeNamespaces": bool, + "numTimeSamples": int, + "timeSamplesSpan": float + } + + @property + def default_options(self): + """The default options for Multiverse USD extraction.""" + + return { + "writeAll": False, + "writeTransforms": False, + "writeVisibility": False, + "writeAttributes": True, + "writeMaterials": True, + "writeVariants": False, + "writeVariantsDefinition": False, + "writeActiveState": False, + "writeNamespaces": True, + "numTimeSamples": 1, + "timeSamplesSpan": 0.0 + } + + def get_file_format(self, instance): + fileFormat = instance.data["fileFormat"] + if fileFormat in range(len(self.file_formats)): + self.scene_type = self.file_formats[fileFormat] + + def process(self, instance): + # Load plugin first + cmds.loadPlugin("MultiverseForMaya", quiet=True) + + # Define output file path + staging_dir = self.staging_dir(instance) + self.get_file_format(instance) + file_name = "{0}.{1}".format(instance.name, self.scene_type) + file_path = os.path.join(staging_dir, file_name) + file_path = file_path.replace('\\', '/') + + # Parse export options + options = self.default_options + self.log.debug("Export options: {0}".format(options)) + + # Perform extraction + self.log.debug("Performing extraction ...") + + with maintained_selection(): + members = instance.data("setMembers") + members = cmds.ls(members, + dag=True, + shapes=False, + type="mvUsdCompoundShape", + noIntermediate=True, + long=True) + self.log.debug('Collected object {}'.format(members)) + if len(members) > 1: + self.log.error('More than one member: {}'.format(members)) + + import multiverse + + over_write_opts = multiverse.OverridesWriteOptions() + options_discard_keys = { + "numTimeSamples", + "timeSamplesSpan", + "frameStart", + "frameEnd", + "handleStart", + "handleEnd", + "step", + "fps" + } + for key, value in options.items(): + if key in options_discard_keys: + continue + setattr(over_write_opts, key, value) + + for member in members: + # @TODO: Make sure there is only one here. 
+
+            self.log.debug("Writing Override for '{}'".format(member))
+            multiverse.WriteOverrides(file_path, member, over_write_opts)
+
+        if "representations" not in instance.data:
+            instance.data["representations"] = []
+
+        representation = {
+            'name': self.scene_type,
+            'ext': self.scene_type,
+            'files': file_name,
+            'stagingDir': staging_dir
+        }
+        instance.data["representations"].append(representation)
+
+        self.log.debug("Extracted instance {} to {}".format(
+            instance.name, file_path))
diff --git a/client/ayon_maya/plugins/publish/extract_multiverse_usd.py b/client/ayon_maya/plugins/publish/extract_multiverse_usd.py
new file mode 100644
index 00000000..a7870936
--- /dev/null
+++ b/client/ayon_maya/plugins/publish/extract_multiverse_usd.py
@@ -0,0 +1,276 @@
+import os
+import six
+
+from maya import cmds
+from maya import mel
+
+import pyblish.api
+from ayon_core.pipeline import publish
+from ayon_maya.api.lib import maintained_selection
+
+
+class ExtractMultiverseUsd(publish.Extractor):
+    """Extractor for Multiverse USD Asset data.
+
+    This will extract settings for a Multiverse Write Asset operation:
+    they are visible in the Maya set node created by a Multiverse USD
+    Asset instance creator.
+
+    The input data contained in the set is:
+
+    - a single hierarchy of Maya nodes. Multiverse supports a variety of Maya
+      nodes such as transforms, mesh, curves, particles, instances, particle
+      instancers, pfx, MASH, lights, cameras, joints, connected materials,
+      shading networks etc. including many of their attributes.
+
+    Upon publish a .usd (or .usdz) asset file will typically be written.
+    """
+
+    label = "Extract Multiverse USD Asset"
+    hosts = ["maya"]
+    families = ["mvUsd"]
+    scene_type = "usd"
+    file_formats = ["usd", "usda", "usdz"]
+
+    @property
+    def options(self):
+        """Overridable options for Multiverse USD Export
+
+        Given in the following format
+            - {NAME: EXPECTED TYPE}
+
+        If the overridden option's type does not match,
+        the option is not included and a warning is logged.
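All the Multiverse extractors in this patch apply these options with the same pattern: time-related keys are routed into a `TimeOptions` object instead, so they are skipped when copying onto the write-options object. A minimal standalone sketch (the options class is a hypothetical stand-in):

    class _WriteOptions(object):
        # Stand-in for e.g. multiverse.AssetWriteOptions
        pass

    options = {"writeMeshes": True, "numTimeSamples": 2}
    discard_keys = {"numTimeSamples", "timeSamplesSpan"}

    write_opts = _WriteOptions()
    for key, value in options.items():
        if key in discard_keys:
            continue  # handled via TimeOptions, not the write options
        setattr(write_opts, key, value)

    assert write_opts.writeMeshes is True
    assert not hasattr(write_opts, "numTimeSamples")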
+ + """ + + return { + "stripNamespaces": bool, + "mergeTransformAndShape": bool, + "writeAncestors": bool, + "flattenParentXforms": bool, + "writeSparseOverrides": bool, + "useMetaPrimPath": bool, + "customRootPath": str, + "customAttributes": str, + "nodeTypesToIgnore": str, + "writeMeshes": bool, + "writeCurves": bool, + "writeParticles": bool, + "writeCameras": bool, + "writeLights": bool, + "writeJoints": bool, + "writeCollections": bool, + "writePositions": bool, + "writeNormals": bool, + "writeUVs": bool, + "writeColorSets": bool, + "writeTangents": bool, + "writeRefPositions": bool, + "writeBlendShapes": bool, + "writeDisplayColor": bool, + "writeSkinWeights": bool, + "writeMaterialAssignment": bool, + "writeHardwareShader": bool, + "writeShadingNetworks": bool, + "writeTransformMatrix": bool, + "writeUsdAttributes": bool, + "writeInstancesAsReferences": bool, + "timeVaryingTopology": bool, + "customMaterialNamespace": str, + "numTimeSamples": int, + "timeSamplesSpan": float + } + + @property + def default_options(self): + """The default options for Multiverse USD extraction.""" + + return { + "stripNamespaces": False, + "mergeTransformAndShape": False, + "writeAncestors": False, + "flattenParentXforms": False, + "writeSparseOverrides": False, + "useMetaPrimPath": False, + "customRootPath": str(), + "customAttributes": str(), + "nodeTypesToIgnore": str(), + "writeMeshes": True, + "writeCurves": True, + "writeParticles": True, + "writeCameras": False, + "writeLights": False, + "writeJoints": False, + "writeCollections": False, + "writePositions": True, + "writeNormals": True, + "writeUVs": True, + "writeColorSets": False, + "writeTangents": False, + "writeRefPositions": False, + "writeBlendShapes": False, + "writeDisplayColor": False, + "writeSkinWeights": False, + "writeMaterialAssignment": False, + "writeHardwareShader": False, + "writeShadingNetworks": False, + "writeTransformMatrix": True, + "writeUsdAttributes": False, + "writeInstancesAsReferences": False, + "timeVaryingTopology": False, + "customMaterialNamespace": str(), + "numTimeSamples": 1, + "timeSamplesSpan": 0.0 + } + + def parse_overrides(self, instance, options): + """Inspect data of instance to determine overridden options""" + + for key in instance.data: + if key not in self.options: + continue + + # Ensure the data is of correct type + value = instance.data[key] + if isinstance(value, six.text_type): + value = str(value) + if not isinstance(value, self.options[key]): + self.log.warning( + "Overridden attribute {key} was of " + "the wrong type: {invalid_type} " + "- should have been {valid_type}".format( + key=key, + invalid_type=type(value).__name__, + valid_type=self.options[key].__name__)) + continue + + options[key] = value + + return options + + def get_default_options(self): + return self.default_options + + def filter_members(self, members): + return members + + def process(self, instance): + + # Load plugin first + cmds.loadPlugin("MultiverseForMaya", quiet=True) + + # Define output file path + staging_dir = self.staging_dir(instance) + file_format = instance.data.get("fileFormat", 0) + if file_format in range(len(self.file_formats)): + self.scene_type = self.file_formats[file_format] + file_name = "{0}.{1}".format(instance.name, self.scene_type) + file_path = os.path.join(staging_dir, file_name) + file_path = file_path.replace('\\', '/') + + # Parse export options + options = self.get_default_options() + options = self.parse_overrides(instance, options) + self.log.debug("Export options: 
{0}".format(options)) + + # Perform extraction + self.log.debug("Performing extraction ...") + + with maintained_selection(): + members = instance.data("setMembers") + self.log.debug('Collected objects: {}'.format(members)) + members = self.filter_members(members) + if not members: + self.log.error('No members!') + return + self.log.debug(' - filtered: {}'.format(members)) + + import multiverse + + time_opts = None + frame_start = instance.data['frameStart'] + frame_end = instance.data['frameEnd'] + if frame_end != frame_start: + time_opts = multiverse.TimeOptions() + + time_opts.writeTimeRange = True + + handle_start = instance.data['handleStart'] + handle_end = instance.data['handleEnd'] + + time_opts.frameRange = ( + frame_start - handle_start, frame_end + handle_end) + time_opts.frameIncrement = instance.data['step'] + time_opts.numTimeSamples = instance.data.get( + 'numTimeSamples', options['numTimeSamples']) + time_opts.timeSamplesSpan = instance.data.get( + 'timeSamplesSpan', options['timeSamplesSpan']) + time_opts.framePerSecond = instance.data.get( + 'fps', mel.eval('currentTimeUnitToFPS()')) + + asset_write_opts = multiverse.AssetWriteOptions(time_opts) + options_discard_keys = { + 'numTimeSamples', + 'timeSamplesSpan', + 'frameStart', + 'frameEnd', + 'handleStart', + 'handleEnd', + 'step', + 'fps' + } + self.log.debug("Write Options:") + for key, value in options.items(): + if key in options_discard_keys: + continue + + self.log.debug(" - {}={}".format(key, value)) + setattr(asset_write_opts, key, value) + + self.log.debug('WriteAsset: {} / {}'.format(file_path, members)) + multiverse.WriteAsset(file_path, members, asset_write_opts) + + if "representations" not in instance.data: + instance.data["representations"] = [] + + representation = { + 'name': self.scene_type, + 'ext': self.scene_type, + 'files': file_name, + 'stagingDir': staging_dir + } + instance.data["representations"].append(representation) + + self.log.debug("Extracted instance {} to {}".format( + instance.name, file_path)) + + +class ExtractMultiverseUsdAnim(ExtractMultiverseUsd): + """Extractor for Multiverse USD Animation Sparse Cache data. + + This will extract the sparse cache data from the scene and generate a + USD file with all the animation data. + + Upon publish a .usd sparse cache will be written. + """ + label = "Extract Multiverse USD Animation Sparse Cache" + families = ["animation", "usd"] + match = pyblish.api.Subset + + def get_default_options(self): + anim_options = self.default_options + anim_options["writeSparseOverrides"] = True + anim_options["writeUsdAttributes"] = True + anim_options["stripNamespaces"] = True + return anim_options + + def filter_members(self, members): + out_set = next((i for i in members if i.endswith("out_SET")), None) + + if out_set is None: + self.log.warning("Expecting out_SET") + return None + + members = cmds.ls(cmds.sets(out_set, query=True), long=True) + return members diff --git a/client/ayon_maya/plugins/publish/extract_multiverse_usd_comp.py b/client/ayon_maya/plugins/publish/extract_multiverse_usd_comp.py new file mode 100644 index 00000000..eac150ec --- /dev/null +++ b/client/ayon_maya/plugins/publish/extract_multiverse_usd_comp.py @@ -0,0 +1,179 @@ +import os + +from maya import cmds + +from ayon_core.pipeline import publish +from ayon_maya.api.lib import maintained_selection + + +class ExtractMultiverseUsdComposition(publish.Extractor): + """Extractor of Multiverse USD Composition data. 
+ + This will extract settings for a Multiverse Write Composition operation: + they are visible in the Maya set node created by a Multiverse USD + Composition instance creator. + + The input data contained in the set is either: + + - a single hierarchy consisting of several Multiverse Compound nodes, with + any number of layers, and Maya transform nodes + - a single Compound node with more than one layer (in this case the "Write + as Compound Layers" option should be set). + + Upon publish a .usda composition file will be written. + """ + + label = "Extract Multiverse USD Composition" + hosts = ["maya"] + families = ["mvUsdComposition"] + scene_type = "usd" + # Order of `fileFormat` must match create_multiverse_usd_comp.py + file_formats = ["usda", "usd"] + + @property + def options(self): + """Overridable options for Multiverse USD Export + + Given in the following format + - {NAME: EXPECTED TYPE} + + If the overridden option's type does not match, + the option is not included and a warning is logged. + + """ + + return { + "stripNamespaces": bool, + "mergeTransformAndShape": bool, + "flattenContent": bool, + "writeAsCompoundLayers": bool, + "writePendingOverrides": bool, + "numTimeSamples": int, + "timeSamplesSpan": float + } + + @property + def default_options(self): + """The default options for Multiverse USD extraction.""" + + return { + "stripNamespaces": True, + "mergeTransformAndShape": False, + "flattenContent": False, + "writeAsCompoundLayers": False, + "writePendingOverrides": False, + "numTimeSamples": 1, + "timeSamplesSpan": 0.0 + } + + def parse_overrides(self, instance, options): + """Inspect data of instance to determine overridden options""" + + for key in instance.data: + if key not in self.options: + continue + + # Ensure the data is of correct type + value = instance.data[key] + if not isinstance(value, self.options[key]): + self.log.warning( + "Overridden attribute {key} was of " + "the wrong type: {invalid_type} " + "- should have been {valid_type}".format( + key=key, + invalid_type=type(value).__name__, + valid_type=self.options[key].__name__)) + continue + + options[key] = value + + return options + + def process(self, instance): + # Load plugin first + cmds.loadPlugin("MultiverseForMaya", quiet=True) + + # Define output file path + staging_dir = self.staging_dir(instance) + file_format = instance.data.get("fileFormat", 0) + if file_format in range(len(self.file_formats)): + self.scene_type = self.file_formats[file_format] + file_name = "{0}.{1}".format(instance.name, self.scene_type) + file_path = os.path.join(staging_dir, file_name) + file_path = file_path.replace('\\', '/') + + # Parse export options + options = self.default_options + options = self.parse_overrides(instance, options) + self.log.debug("Export options: {0}".format(options)) + + # Perform extraction + self.log.debug("Performing extraction ...") + + with maintained_selection(): + members = instance.data("setMembers") + self.log.debug('Collected object {}'.format(members)) + + import multiverse + + time_opts = None + frame_start = instance.data['frameStart'] + frame_end = instance.data['frameEnd'] + handle_start = instance.data['handleStart'] + handle_end = instance.data['handleEnd'] + step = instance.data['step'] + fps = instance.data['fps'] + if frame_end != frame_start: + time_opts = multiverse.TimeOptions() + + time_opts.writeTimeRange = True + time_opts.frameRange = ( + frame_start - handle_start, frame_end + handle_end) + time_opts.frameIncrement = step + time_opts.numTimeSamples = 
instance.data["numTimeSamples"]
+                time_opts.timeSamplesSpan = instance.data["timeSamplesSpan"]
+                time_opts.framePerSecond = fps
+
+            comp_write_opts = multiverse.CompositionWriteOptions()
+
+            # OP tells MV to write to a staging directory, and then moves the
+            # file to its final publish directory. By default, MV writes
+            # relative paths, but these paths will break when the referencing
+            # file moves. This option forces writes to absolute paths, which
+            # is ok within OP because all published assets have static paths,
+            # and MV can only reference published assets. When a proper
+            # UsdAssetResolver is used, this won't be needed.
+            comp_write_opts.forceAbsolutePaths = True
+
+            options_discard_keys = {
+                'numTimeSamples',
+                'timeSamplesSpan',
+                'frameStart',
+                'frameEnd',
+                'handleStart',
+                'handleEnd',
+                'step',
+                'fps'
+            }
+            for key, value in options.items():
+                if key in options_discard_keys:
+                    continue
+                setattr(comp_write_opts, key, value)
+
+            multiverse.WriteComposition(file_path, members, comp_write_opts)
+
+        if "representations" not in instance.data:
+            instance.data["representations"] = []
+
+        representation = {
+            'name': self.scene_type,
+            'ext': self.scene_type,
+            'files': file_name,
+            'stagingDir': staging_dir
+        }
+        instance.data["representations"].append(representation)
+
+        self.log.debug("Extracted instance {} to {}".format(instance.name,
+                                                            file_path))
diff --git a/client/ayon_maya/plugins/publish/extract_multiverse_usd_over.py b/client/ayon_maya/plugins/publish/extract_multiverse_usd_over.py
new file mode 100644
index 00000000..d1e806da
--- /dev/null
+++ b/client/ayon_maya/plugins/publish/extract_multiverse_usd_over.py
@@ -0,0 +1,157 @@
+import os
+
+from ayon_core.pipeline import publish
+from ayon_maya.api.lib import maintained_selection
+
+from maya import cmds
+
+
+class ExtractMultiverseUsdOverride(publish.Extractor):
+    """Extractor for Multiverse USD Override data.
+
+    This will extract settings for a Multiverse Write Override operation:
+    they are visible in the Maya set node created by a Multiverse USD
+    Override instance creator.
+
+    The input data contained in the set is:
+
+    - a single Multiverse Compound node with any number of overrides
+      (typically set in MEOW)
+
+    Upon publish a .usda override file will be written.
+    """
+
+    label = "Extract Multiverse USD Override"
+    hosts = ["maya"]
+    families = ["mvUsdOverride"]
+    scene_type = "usd"
+    # Order of `fileFormat` must match create_multiverse_usd_over.py
+    file_formats = ["usda", "usd"]
+
+    @property
+    def options(self):
+        """Overridable options for Multiverse USD Export
+
+        Given in the following format
+            - {NAME: EXPECTED TYPE}
+
+        If the overridden option's type does not match,
+        the option is not included and a warning is logged.
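As in the other Multiverse extractors, the `fileFormat` value collected on the instance is an index into `file_formats`, which is why the list order must match the creator plugin. A small sketch with hypothetical data:

    file_formats = ["usda", "usd"]  # order must match the creator plugin
    file_format = 0  # as stored in instance.data["fileFormat"]
    if file_format in range(len(file_formats)):
        scene_type = file_formats[file_format]
    assert scene_type == "usda"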
+ + """ + + return { + "writeAll": bool, + "writeTransforms": bool, + "writeVisibility": bool, + "writeAttributes": bool, + "writeMaterials": bool, + "writeVariants": bool, + "writeVariantsDefinition": bool, + "writeActiveState": bool, + "writeNamespaces": bool, + "numTimeSamples": int, + "timeSamplesSpan": float + } + + @property + def default_options(self): + """The default options for Multiverse USD extraction.""" + + return { + "writeAll": False, + "writeTransforms": True, + "writeVisibility": True, + "writeAttributes": True, + "writeMaterials": True, + "writeVariants": True, + "writeVariantsDefinition": True, + "writeActiveState": True, + "writeNamespaces": False, + "numTimeSamples": 1, + "timeSamplesSpan": 0.0 + } + + def process(self, instance): + # Load plugin first + cmds.loadPlugin("MultiverseForMaya", quiet=True) + + # Define output file path + staging_dir = self.staging_dir(instance) + file_format = instance.data.get("fileFormat", 0) + if file_format in range(len(self.file_formats)): + self.scene_type = self.file_formats[file_format] + file_name = "{0}.{1}".format(instance.name, self.scene_type) + file_path = os.path.join(staging_dir, file_name) + file_path = file_path.replace("\\", "/") + + # Parse export options + options = self.default_options + self.log.debug("Export options: {0}".format(options)) + + # Perform extraction + self.log.debug("Performing extraction ...") + + with maintained_selection(): + members = instance.data("setMembers") + members = cmds.ls(members, + dag=True, + shapes=False, + type="mvUsdCompoundShape", + noIntermediate=True, + long=True) + self.log.debug("Collected object {}".format(members)) + + # TODO: Deal with asset, composition, override with options. + import multiverse + + time_opts = None + frame_start = instance.data["frameStart"] + frame_end = instance.data["frameEnd"] + handle_start = instance.data["handleStart"] + handle_end = instance.data["handleEnd"] + step = instance.data["step"] + fps = instance.data["fps"] + if frame_end != frame_start: + time_opts = multiverse.TimeOptions() + + time_opts.writeTimeRange = True + time_opts.frameRange = ( + frame_start - handle_start, frame_end + handle_end) + time_opts.frameIncrement = step + time_opts.numTimeSamples = instance.data["numTimeSamples"] + time_opts.timeSamplesSpan = instance.data["timeSamplesSpan"] + time_opts.framePerSecond = fps + + over_write_opts = multiverse.OverridesWriteOptions(time_opts) + options_discard_keys = { + "numTimeSamples", + "timeSamplesSpan", + "frameStart", + "frameEnd", + "handleStart", + "handleEnd", + "step", + "fps" + } + for key, value in options.items(): + if key in options_discard_keys: + continue + setattr(over_write_opts, key, value) + + for member in members: + multiverse.WriteOverrides(file_path, member, over_write_opts) + + if "representations" not in instance.data: + instance.data["representations"] = [] + + representation = { + 'name': self.scene_type, + 'ext': self.scene_type, + 'files': file_name, + 'stagingDir': staging_dir + } + instance.data["representations"].append(representation) + + self.log.debug("Extracted instance {} to {}".format( + instance.name, file_path)) diff --git a/client/ayon_maya/plugins/publish/extract_obj.py b/client/ayon_maya/plugins/publish/extract_obj.py new file mode 100644 index 00000000..16204b60 --- /dev/null +++ b/client/ayon_maya/plugins/publish/extract_obj.py @@ -0,0 +1,77 @@ +# -*- coding: utf-8 -*- +import os + +from maya import cmds +import pyblish.api +from ayon_core.pipeline import publish +from ayon_maya.api import 
lib
+
+
+class ExtractObj(publish.Extractor):
+    """Extract OBJ from Maya.
+
+    This extracts reproducible OBJ exports ignoring any of the settings
+    set on the local machine in the OBJ export options window.
+
+    """
+    order = pyblish.api.ExtractorOrder
+    hosts = ["maya"]
+    label = "Extract OBJ"
+    families = ["model"]
+
+    def process(self, instance):
+
+        # Define output path
+        staging_dir = self.staging_dir(instance)
+        filename = "{0}.obj".format(instance.name)
+        path = os.path.join(staging_dir, filename)
+
+        # The export requires forward slashes because we need to
+        # format it into a string in a mel expression
+        self.log.debug("Extracting OBJ to: {0}".format(path))
+
+        members = instance.data("setMembers")
+        members = cmds.ls(members,
+                          dag=True,
+                          shapes=True,
+                          type=("mesh", "nurbsCurve"),
+                          noIntermediate=True,
+                          long=True)
+        self.log.debug("Members: {0}".format(members))
+        self.log.debug("Instance: {0}".format(instance[:]))
+
+        if not cmds.pluginInfo('objExport', query=True, loaded=True):
+            cmds.loadPlugin('objExport')
+
+        # Export
+        with lib.no_display_layers(instance):
+            with lib.displaySmoothness(members,
+                                       divisionsU=0,
+                                       divisionsV=0,
+                                       pointsWire=4,
+                                       pointsShaded=1,
+                                       polygonObject=1):
+                with lib.shader(members,
+                                shadingEngine="initialShadingGroup"):
+                    with lib.maintained_selection():
+                        cmds.select(members, noExpand=True)
+                        cmds.file(path,
+                                  exportSelected=True,
+                                  type='OBJexport',
+                                  preserveReferences=True,
+                                  force=True)
+
+        # Note: the key must be "representations" (plural); that is the
+        # list appended to below and consumed by the integrator
+        if "representations" not in instance.data:
+            instance.data["representations"] = []
+
+        representation = {
+            'name': 'obj',
+            'ext': 'obj',
+            'files': filename,
+            "stagingDir": staging_dir,
+        }
+        instance.data["representations"].append(representation)
+
+        self.log.debug("Extracted OBJ successfully to: {0}".format(path))
diff --git a/client/ayon_maya/plugins/publish/extract_playblast.py b/client/ayon_maya/plugins/publish/extract_playblast.py
new file mode 100644
index 00000000..8a94b24e
--- /dev/null
+++ b/client/ayon_maya/plugins/publish/extract_playblast.py
@@ -0,0 +1,106 @@
+import os
+
+import clique
+
+from ayon_core.pipeline import publish
+from ayon_maya.api import lib
+
+from maya import cmds
+
+
+class ExtractPlayblast(publish.Extractor):
+    """Extract viewport playblast.
+
+    Takes review camera and creates review Quicktime video based on viewport
+    capture.
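The frame collection lookup below leans on `clique` to group frame-numbered files; a short sketch with hypothetical file names:

    import clique

    files = ["shot010.0001.png", "shot010.0002.png", "thumbnail.png"]
    collections, remainder = clique.assemble(
        files, minimum_items=1, patterns=[clique.PATTERNS["frames"]])
    # collections[0].format("{head}") -> "shot010." ; rstrip(".") yields
    # the base name that is compared against the playblast output path.
    assert remainder == ["thumbnail.png"]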
+ + """ + + label = "Extract Playblast" + hosts = ["maya"] + families = ["review"] + optional = True + capture_preset = {} + profiles = None + + def process(self, instance): + self.log.debug("Extracting playblast..") + + # get scene fps + fps = instance.data.get("fps") or instance.context.data.get("fps") + + # if start and end frames cannot be determined, get them + # from Maya timeline + start = instance.data.get("frameStartFtrack") + end = instance.data.get("frameEndFtrack") + if start is None: + start = cmds.playbackOptions(query=True, animationStartTime=True) + if end is None: + end = cmds.playbackOptions(query=True, animationEndTime=True) + + self.log.debug("start: {}, end: {}".format(start, end)) + task_data = instance.data["anatomyData"].get("task", {}) + capture_preset = lib.get_capture_preset( + task_data.get("name"), + task_data.get("type"), + instance.data["productName"], + instance.context.data["project_settings"], + self.log + ) + stagingdir = self.staging_dir(instance) + filename = instance.name + path = os.path.join(stagingdir, filename) + self.log.debug("Outputting images to %s" % path) + # get cameras + camera = instance.data["review_camera"] + preset = lib.generate_capture_preset( + instance, camera, path, + start=start, end=end, + capture_preset=capture_preset) + lib.render_capture_preset(preset) + + # Find playblast sequence + collected_files = os.listdir(stagingdir) + patterns = [clique.PATTERNS["frames"]] + collections, remainder = clique.assemble(collected_files, + minimum_items=1, + patterns=patterns) + + self.log.debug("Searching playblast collection for: %s", path) + frame_collection = None + for collection in collections: + filebase = collection.format("{head}").rstrip(".") + self.log.debug("Checking collection head: %s", filebase) + if filebase in path: + frame_collection = collection + self.log.debug( + "Found playblast collection: %s", frame_collection + ) + + tags = ["review"] + if not instance.data.get("keepImages"): + tags.append("delete") + + # Add camera node name to representation data + camera_node_name = cmds.listRelatives(camera, parent=True)[0] + + collected_files = list(frame_collection) + # single frame file shouldn't be in list, only as a string + if len(collected_files) == 1: + collected_files = collected_files[0] + + if "representations" not in instance.data: + instance.data["representations"] = [] + + representation = { + "name": capture_preset["Codec"]["compression"], + "ext": capture_preset["Codec"]["compression"], + "files": collected_files, + "stagingDir": stagingdir, + "frameStart": int(start), + "frameEnd": int(end), + "fps": fps, + "tags": tags, + "camera_name": camera_node_name + } + instance.data["representations"].append(representation) diff --git a/client/ayon_maya/plugins/publish/extract_pointcache.py b/client/ayon_maya/plugins/publish/extract_pointcache.py new file mode 100644 index 00000000..04a895ff --- /dev/null +++ b/client/ayon_maya/plugins/publish/extract_pointcache.py @@ -0,0 +1,525 @@ +import os +from collections import OrderedDict + +from maya import cmds + +from ayon_core.pipeline import publish +from ayon_maya.api.alembic import extract_alembic +from ayon_maya.api.lib import ( + get_all_children, + suspended_refresh, + maintained_selection, + iter_visible_nodes_in_range +) +from ayon_core.lib import ( + BoolDef, + TextDef, + NumberDef, + EnumDef, + UISeparatorDef, + UILabelDef, +) +from ayon_core.pipeline.publish import AYONPyblishPluginMixin +from ayon_core.pipeline import KnownPublishError + + +class 
ExtractAlembic(publish.Extractor, AYONPyblishPluginMixin): + """Produce an alembic of just point positions and normals. + + Positions and normals, uvs, creases are preserved, but nothing more, + for plain and predictable point caches. + + Plugin can run locally or remotely (on a farm - if instance is marked with + "farm" it will be skipped in local processing, but processed on farm) + """ + + label = "Extract Pointcache (Alembic)" + hosts = ["maya"] + families = ["pointcache", "model", "vrayproxy.alembic"] + targets = ["local", "remote"] + + # From settings + attr = [] + attrPrefix = [] + bake_attributes = [] + bake_attribute_prefixes = [] + dataFormat = "ogawa" + eulerFilter = False + melPerFrameCallback = "" + melPostJobCallback = "" + overrides = [] + preRoll = False + preRollStartFrame = 0 + pythonPerFrameCallback = "" + pythonPostJobCallback = "" + renderableOnly = False + stripNamespaces = True + uvsOnly = False + uvWrite = False + userAttr = "" + userAttrPrefix = "" + verbose = False + visibleOnly = False + wholeFrameGeo = False + worldSpace = True + writeColorSets = False + writeCreases = False + writeFaceSets = False + writeNormals = True + writeUVSets = False + writeVisibility = False + + def process(self, instance): + if instance.data.get("farm"): + self.log.debug("Should be processed on farm, skipping.") + return + + nodes, roots = self.get_members_and_roots(instance) + + # Collect the start and end including handles + start = float(instance.data.get("frameStartHandle", 1)) + end = float(instance.data.get("frameEndHandle", 1)) + + attribute_values = self.get_attr_values_from_data( + instance.data + ) + + attrs = [ + attr.strip() + for attr in attribute_values.get("attr", "").split(";") + if attr.strip() + ] + attrs += instance.data.get("userDefinedAttributes", []) + attrs += self.bake_attributes + attrs += ["cbId"] + + attr_prefixes = [ + attr.strip() + for attr in attribute_values.get("attrPrefix", "").split(";") + if attr.strip() + ] + attr_prefixes += self.bake_attribute_prefixes + + user_attrs = [ + attr.strip() + for attr in attribute_values.get("userAttr", "").split(";") + if attr.strip() + ] + + user_attr_prefixes = [ + attr.strip() + for attr in attribute_values.get("userAttrPrefix", "").split(";") + if attr.strip() + ] + + self.log.debug("Extracting pointcache..") + dirname = self.staging_dir(instance) + + parent_dir = self.staging_dir(instance) + filename = "{name}.abc".format(**instance.data) + path = os.path.join(parent_dir, filename) + + root = None + if not instance.data.get("includeParentHierarchy", True): + # Set the root nodes if we don't want to include parents + # The roots are to be considered the ones that are the actual + # direct members of the set + root = roots + + kwargs = { + "file": path, + "attr": attrs, + "attrPrefix": attr_prefixes, + "userAttr": user_attrs, + "userAttrPrefix": user_attr_prefixes, + "dataFormat": attribute_values.get("dataFormat", self.dataFormat), + "endFrame": end, + "eulerFilter": attribute_values.get( + "eulerFilter", self.eulerFilter + ), + "preRoll": attribute_values.get("preRoll", self.preRoll), + "preRollStartFrame": attribute_values.get( + "preRollStartFrame", self.preRollStartFrame + ), + "renderableOnly": attribute_values.get( + "renderableOnly", self.renderableOnly + ), + "root": root, + "selection": True, + "startFrame": start, + "step": instance.data.get( + "creator_attributes", {} + ).get("step", 1.0), + "stripNamespaces": attribute_values.get( + "stripNamespaces", self.stripNamespaces + ), + "uvWrite": 
attribute_values.get("uvWrite", self.uvWrite),
+            "verbose": attribute_values.get("verbose", self.verbose),
+            "wholeFrameGeo": attribute_values.get(
+                "wholeFrameGeo", self.wholeFrameGeo
+            ),
+            "worldSpace": attribute_values.get("worldSpace", self.worldSpace),
+            "writeColorSets": attribute_values.get(
+                "writeColorSets", self.writeColorSets
+            ),
+            "writeCreases": attribute_values.get(
+                "writeCreases", self.writeCreases
+            ),
+            "writeFaceSets": attribute_values.get(
+                "writeFaceSets", self.writeFaceSets
+            ),
+            "writeUVSets": attribute_values.get(
+                "writeUVSets", self.writeUVSets
+            ),
+            "writeVisibility": attribute_values.get(
+                "writeVisibility", self.writeVisibility
+            ),
+            "uvsOnly": attribute_values.get(
+                "uvsOnly", self.uvsOnly
+            ),
+            "melPerFrameCallback": attribute_values.get(
+                "melPerFrameCallback", self.melPerFrameCallback
+            ),
+            "melPostJobCallback": attribute_values.get(
+                "melPostJobCallback", self.melPostJobCallback
+            ),
+            "pythonPerFrameCallback": attribute_values.get(
+                "pythonPerFrameCallback", self.pythonPerFrameCallback
+            ),
+            "pythonPostJobCallback": attribute_values.get(
+                "pythonPostJobCallback", self.pythonPostJobCallback
+            ),
+            # Note that this converts `writeNormals` to `noNormals` for the
+            # `AbcExport` equivalent in `extract_alembic`
+            "noNormals": not attribute_values.get(
+                "writeNormals", self.writeNormals
+            ),
+        }
+
+        if instance.data.get("visibleOnly", False):
+            # If we only want to include nodes that are visible in the frame
+            # range then we need to do our own check. Alembic's `visibleOnly`
+            # flag does not filter out those that are only hidden on some
+            # frames as it counts "animated" or "connected" visibilities as
+            # if it's always visible.
+            nodes = list(
+                iter_visible_nodes_in_range(nodes, start=start, end=end)
+            )
+
+        suspend = not instance.data.get("refresh", False)
+        with suspended_refresh(suspend=suspend):
+            with maintained_selection():
+                cmds.select(nodes, noExpand=True)
+                self.log.debug(
+                    "Running `extract_alembic` with the keyword arguments: "
+                    "{}".format(kwargs)
+                )
+                extract_alembic(**kwargs)
+
+        if "representations" not in instance.data:
+            instance.data["representations"] = []
+
+        representation = {
+            "name": "abc",
+            "ext": "abc",
+            "files": filename,
+            "stagingDir": dirname
+        }
+        instance.data["representations"].append(representation)
+
+        if not instance.data.get("stagingDir_persistent", False):
+            instance.context.data["cleanupFullPaths"].append(path)
+
+        self.log.debug("Extracted {} to {}".format(instance, dirname))
+
+        # Extract proxy.
+        if not instance.data.get("proxy"):
+            self.log.debug("No proxy nodes found. 
Skipping proxy extraction.") + return + + path = path.replace(".abc", "_proxy.abc") + kwargs["file"] = path + if not instance.data.get("includeParentHierarchy", True): + # Set the root nodes if we don't want to include parents + # The roots are to be considered the ones that are the actual + # direct members of the set + kwargs["root"] = instance.data["proxyRoots"] + + with suspended_refresh(suspend=suspend): + with maintained_selection(): + cmds.select(instance.data["proxy"]) + extract_alembic(**kwargs) + representation = { + "name": "proxy", + "ext": "abc", + "files": os.path.basename(path), + "stagingDir": dirname, + "outputName": "proxy" + } + instance.data["representations"].append(representation) + + def get_members_and_roots(self, instance): + return instance[:], instance.data.get("setMembers") + + @classmethod + def get_attribute_defs(cls): + if not cls.overrides: + return [] + + override_defs = OrderedDict({ + "eulerFilter": BoolDef( + "eulerFilter", + label="Euler Filter", + default=cls.eulerFilter, + tooltip="Apply Euler filter while sampling rotations." + ), + "renderableOnly": BoolDef( + "renderableOnly", + label="Renderable Only", + default=cls.renderableOnly, + tooltip="Only export renderable visible shapes." + ), + "stripNamespaces": BoolDef( + "stripNamespaces", + label="Strip Namespaces", + default=cls.stripNamespaces, + tooltip=( + "Namespaces will be stripped off of the node before being " + "written to Alembic." + ) + ), + "uvsOnly": BoolDef( + "uvsOnly", + label="UVs Only", + default=cls.uvsOnly, + tooltip=( + "If this flag is present, only uv data for PolyMesh and " + "SubD shapes will be written to the Alembic file." + ) + ), + "uvWrite": BoolDef( + "uvWrite", + label="UV Write", + default=cls.uvWrite, + tooltip=( + "Uv data for PolyMesh and SubD shapes will be written to " + "the Alembic file." + ) + ), + "verbose": BoolDef( + "verbose", + label="Verbose", + default=cls.verbose, + tooltip="Prints the current frame that is being evaluated." + ), + "visibleOnly": BoolDef( + "visibleOnly", + label="Visible Only", + default=cls.visibleOnly, + tooltip="Only export dag objects visible during frame range." + ), + "wholeFrameGeo": BoolDef( + "wholeFrameGeo", + label="Whole Frame Geo", + default=cls.wholeFrameGeo, + tooltip=( + "Data for geometry will only be written out on whole " + "frames." + ) + ), + "worldSpace": BoolDef( + "worldSpace", + label="World Space", + default=cls.worldSpace, + tooltip="Any root nodes will be stored in world space." + ), + "writeColorSets": BoolDef( + "writeColorSets", + label="Write Color Sets", + default=cls.writeColorSets, + tooltip="Write vertex colors with the geometry." + ), + "writeCreases": BoolDef( + "writeCreases", + label="Write Creases", + default=cls.writeCreases, + tooltip="Write the geometry's edge and vertex crease " + "information." + ), + "writeFaceSets": BoolDef( + "writeFaceSets", + label="Write Face Sets", + default=cls.writeFaceSets, + tooltip="Write face sets with the geometry." + ), + "writeNormals": BoolDef( + "writeNormals", + label="Write Normals", + default=cls.writeNormals, + tooltip="Write normals with the deforming geometry." + ), + "writeUVSets": BoolDef( + "writeUVSets", + label="Write UV Sets", + default=cls.writeUVSets, + tooltip=( + "Write all uv sets on MFnMeshes as vector 2 indexed " + "geometry parameters with face varying scope." 
+                )
+            ),
+            "writeVisibility": BoolDef(
+                "writeVisibility",
+                label="Write Visibility",
+                default=cls.writeVisibility,
+                tooltip=(
+                    "Visibility state will be stored in the Alembic file. "
+                    "Otherwise everything written out is treated as visible."
+                )
+            ),
+            "preRoll": BoolDef(
+                "preRoll",
+                label="Pre Roll",
+                default=cls.preRoll,
+                tooltip="This frame range will not be sampled."
+            ),
+            "preRollStartFrame": NumberDef(
+                "preRollStartFrame",
+                label="Pre Roll Start Frame",
+                tooltip=(
+                    "The frame to start scene evaluation at. This is used"
+                    " to set the starting frame for time dependent "
+                    "translations and can be used to evaluate run-up that"
+                    " isn't actually translated."
+                ),
+                default=cls.preRollStartFrame
+            ),
+            "dataFormat": EnumDef(
+                "dataFormat",
+                label="Data Format",
+                items=["ogawa", "HDF"],
+                default=cls.dataFormat,
+                tooltip="The data format to use to write the file."
+            ),
+            "attr": TextDef(
+                "attr",
+                label="Custom Attributes",
+                placeholder="attr1; attr2; ...",
+                default=cls.attr,
+                tooltip=(
+                    "Attributes matching by name will be included in the "
+                    "Alembic export. Attributes should be separated by "
+                    "semi-colon `;`"
+                )
+            ),
+            "attrPrefix": TextDef(
+                "attrPrefix",
+                label="Custom Attributes Prefix",
+                placeholder="prefix1; prefix2; ...",
+                default=cls.attrPrefix,
+                tooltip=(
+                    "Attributes starting with these prefixes will be included "
+                    "in the Alembic export. Attributes should be separated by "
+                    "semi-colon `;`"
+                )
+            ),
+            "userAttr": TextDef(
+                "userAttr",
+                label="User Attr",
+                placeholder="attr1; attr2; ...",
+                default=cls.userAttr,
+                tooltip=(
+                    "Attributes matching by name will be included in the "
+                    "Alembic export. Attributes should be separated by "
+                    "semi-colon `;`"
+                )
+            ),
+            "userAttrPrefix": TextDef(
+                "userAttrPrefix",
+                label="User Attr Prefix",
+                placeholder="prefix1; prefix2; ...",
+                default=cls.userAttrPrefix,
+                tooltip=(
+                    "Attributes starting with these prefixes will be included "
+                    "in the Alembic export. Attributes should be separated by "
+                    "semi-colon `;`"
+                )
+            ),
+            "melPerFrameCallback": TextDef(
+                "melPerFrameCallback",
+                label="Mel Per Frame Callback",
+                default=cls.melPerFrameCallback,
+                tooltip=(
+                    "When each frame (and the static frame) is evaluated the "
+                    "string specified is evaluated as a Mel command."
+                )
+            ),
+            "melPostJobCallback": TextDef(
+                "melPostJobCallback",
+                label="Mel Post Job Callback",
+                default=cls.melPostJobCallback,
+                tooltip=(
+                    "When the translation has finished the string specified "
+                    "is evaluated as a Mel command."
+                )
+            ),
+            "pythonPerFrameCallback": TextDef(
+                "pythonPerFrameCallback",
+                label="Python Per Frame Callback",
+                default=cls.pythonPerFrameCallback,
+                tooltip=(
+                    "When each frame (and the static frame) is evaluated the "
+                    "string specified is evaluated as a python command."
+                )
+            ),
+            "pythonPostJobCallback": TextDef(
+                "pythonPostJobCallback",
+                label="Python Post Job Callback",
+                default=cls.pythonPostJobCallback,
+                tooltip=(
+                    "When the translation has finished the string specified "
+                    "is evaluated as a python command."
+ ) + ) + }) + + defs = super(ExtractAlembic, cls).get_attribute_defs() + + defs.extend([ + UISeparatorDef("sep_alembic_options"), + UILabelDef("Alembic Options"), + ]) + + # The Arguments that can be modified by the Publisher + overrides = set(cls.overrides) + for key, value in override_defs.items(): + if key not in overrides: + continue + + defs.append(value) + + defs.append( + UISeparatorDef("sep_alembic_options_end") + ) + + return defs + + +class ExtractAnimation(ExtractAlembic): + label = "Extract Animation (Alembic)" + families = ["animation"] + + def get_members_and_roots(self, instance): + # Collect the out set nodes + out_sets = [node for node in instance if node.endswith("out_SET")] + if len(out_sets) != 1: + raise KnownPublishError( + "Couldn't find exactly one out_SET: {0}".format(out_sets) + ) + out_set = out_sets[0] + roots = cmds.sets(out_set, query=True) or [] + + # Include all descendants + nodes = roots.copy() + nodes.extend(get_all_children(roots, ignore_intermediate_objects=True)) + + return nodes, roots diff --git a/client/ayon_maya/plugins/publish/extract_proxy_abc.py b/client/ayon_maya/plugins/publish/extract_proxy_abc.py new file mode 100644 index 00000000..dd15622f --- /dev/null +++ b/client/ayon_maya/plugins/publish/extract_proxy_abc.py @@ -0,0 +1,110 @@ +import os + +from maya import cmds + +from ayon_core.pipeline import publish +from ayon_maya.api.alembic import extract_alembic +from ayon_maya.api.lib import ( + suspended_refresh, + maintained_selection, + iter_visible_nodes_in_range +) + + +class ExtractProxyAlembic(publish.Extractor): + """Produce an alembic for bounding box geometry + """ + + label = "Extract Proxy (Alembic)" + hosts = ["maya"] + families = ["proxyAbc"] + + def process(self, instance): + name_suffix = instance.data.get("nameSuffix") + # Collect the start and end including handles + start = float(instance.data.get("frameStartHandle", 1)) + end = float(instance.data.get("frameEndHandle", 1)) + + attrs = instance.data.get("attr", "").split(";") + attrs = [value for value in attrs if value.strip()] + attrs += ["cbId"] + + attr_prefixes = instance.data.get("attrPrefix", "").split(";") + attr_prefixes = [value for value in attr_prefixes if value.strip()] + + self.log.debug("Extracting Proxy Alembic..") + dirname = self.staging_dir(instance) + + filename = "{name}.abc".format(**instance.data) + path = os.path.join(dirname, filename) + + proxy_root = self.create_proxy_geometry(instance, + name_suffix, + start, + end) + + options = { + "step": instance.data.get("step", 1.0), + "attr": attrs, + "attrPrefix": attr_prefixes, + "writeVisibility": True, + "writeCreases": True, + "writeColorSets": instance.data.get("writeColorSets", False), + "writeFaceSets": instance.data.get("writeFaceSets", False), + "uvWrite": True, + "selection": True, + "worldSpace": instance.data.get("worldSpace", True), + "root": proxy_root + } + + if int(cmds.about(version=True)) >= 2017: + # Since Maya 2017 alembic supports multiple uv sets - write them. 
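+            # NOTE: cmds.about(version=True) returns the Maya version as a
+            # string (e.g. "2024"), hence the int() cast for the comparison.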
+ options["writeUVSets"] = True + + with suspended_refresh(): + with maintained_selection(): + cmds.select(proxy_root, hi=True, noExpand=True) + extract_alembic(file=path, + startFrame=start, + endFrame=end, + **options) + + if "representations" not in instance.data: + instance.data["representations"] = [] + + representation = { + 'name': 'abc', + 'ext': 'abc', + 'files': filename, + "stagingDir": dirname + } + instance.data["representations"].append(representation) + + if not instance.data.get("stagingDir_persistent", False): + instance.context.data["cleanupFullPaths"].append(path) + + self.log.debug("Extracted {} to {}".format(instance, dirname)) + # remove the bounding box + bbox_master = cmds.ls("bbox_grp") + cmds.delete(bbox_master) + + def create_proxy_geometry(self, instance, name_suffix, start, end): + nodes = instance[:] + nodes = list(iter_visible_nodes_in_range(nodes, + start=start, + end=end)) + + inst_selection = cmds.ls(nodes, long=True) + cmds.geomToBBox(inst_selection, + nameSuffix=name_suffix, + keepOriginal=True, + single=False, + bakeAnimation=True, + startTime=start, + endTime=end) + # create master group for bounding + # boxes as the main root + master_group = cmds.group(name="bbox_grp") + bbox_sel = cmds.ls(master_group, long=True) + self.log.debug("proxy_root: {}".format(bbox_sel)) + return bbox_sel diff --git a/client/ayon_maya/plugins/publish/extract_redshift_proxy.py b/client/ayon_maya/plugins/publish/extract_redshift_proxy.py new file mode 100644 index 00000000..9d0f4085 --- /dev/null +++ b/client/ayon_maya/plugins/publish/extract_redshift_proxy.py @@ -0,0 +1,103 @@ +# -*- coding: utf-8 -*- +"""Redshift Proxy extractor.""" +import os + +from maya import cmds + +from ayon_core.pipeline import publish +from ayon_maya.api.lib import ( + maintained_selection, + renderlayer +) +from ayon_maya.api.render_setup_tools import ( + allow_export_from_render_setup_layer +) + + +class ExtractRedshiftProxy(publish.Extractor): + """Extract the content of the instance to a redshift proxy file.""" + + label = "Redshift Proxy (.rs)" + hosts = ["maya"] + families = ["redshiftproxy"] + + def process(self, instance): + """Extractor entry point.""" + + # Make sure Redshift is loaded + cmds.loadPlugin("redshift4maya", quiet=True) + + staging_dir = self.staging_dir(instance) + file_name = "{}.rs".format(instance.name) + file_path = os.path.join(staging_dir, file_name) + + anim_on = instance.data["animation"] + rs_options = "exportConnectivity=0;enableCompression=1;keepUnused=0;" + repr_files = file_name + + if not anim_on: + # Remove animation information because it is not required for + # non-animated products + keys = ["frameStart", + "frameEnd", + "handleStart", + "handleEnd", + "frameStartHandle", + "frameEndHandle"] + for key in keys: + instance.data.pop(key, None) + + else: + start_frame = instance.data["frameStartHandle"] + end_frame = instance.data["frameEndHandle"] + rs_options = "{}startFrame={};endFrame={};frameStep={};".format( + rs_options, start_frame, + end_frame, instance.data["step"] + ) + + root, ext = os.path.splitext(file_path) + # Padding is taken from number of digits of the end_frame. + # Not sure where Redshift is taking it. 
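+            # In practice the filenames below are padded to a fixed four
+            # digits via rjust(4, "0"), e.g. frame 25 -> "name.0025.rs";
+            # frames above 9999 would not match this padding.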
+ repr_files = [ + "{}.{}{}".format(os.path.basename(root), str(frame).rjust(4, "0"), ext) # noqa: E501 + for frame in range( + int(start_frame), + int(end_frame) + 1, + int(instance.data["step"]) + )] + # vertex_colors = instance.data.get("vertexColors", False) + + # Write out rs file + self.log.debug("Writing: '%s'" % file_path) + + # Allow overriding what renderlayer to export from. By default force + # it to the default render layer. (Note that the renderlayer isn't + # currently exposed as an attribute to artists) + layer = instance.data.get("renderLayer", "defaultRenderLayer") + + with maintained_selection(): + with renderlayer(layer): + with allow_export_from_render_setup_layer(): + cmds.select(instance.data["setMembers"], noExpand=True) + cmds.file(file_path, + preserveReferences=False, + force=True, + type="Redshift Proxy", + exportSelected=True, + options=rs_options) + + if "representations" not in instance.data: + instance.data["representations"] = [] + + self.log.debug("Files: {}".format(repr_files)) + + representation = { + 'name': 'rs', + 'ext': 'rs', + 'files': repr_files, + "stagingDir": staging_dir, + } + instance.data["representations"].append(representation) + + self.log.debug("Extracted instance '%s' to: %s" + % (instance.name, staging_dir)) diff --git a/client/ayon_maya/plugins/publish/extract_rendersetup.py b/client/ayon_maya/plugins/publish/extract_rendersetup.py new file mode 100644 index 00000000..48150337 --- /dev/null +++ b/client/ayon_maya/plugins/publish/extract_rendersetup.py @@ -0,0 +1,41 @@ +import os +import json + +import maya.app.renderSetup.model.renderSetup as renderSetup +from ayon_core.pipeline import publish + + +class ExtractRenderSetup(publish.Extractor): + """ + Produce renderSetup template file + + This will save whole renderSetup to json file for later use. 
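+
+    A hedged sketch of re-applying a published template later (assuming
+    the renderSetup decode API; the file name is illustrative):
+
+        import json
+        import maya.app.renderSetup.model.renderSetup as renderSetup
+
+        with open("rendersetup.json") as f:
+            renderSetup.instance().decode(
+                json.load(f), renderSetup.DECODE_AND_OVERWRITE, None)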
+    """
+
+    label = "Extract RenderSetup"
+    hosts = ["maya"]
+    families = ["rendersetup"]
+
+    def process(self, instance):
+        parent_dir = self.staging_dir(instance)
+        json_filename = "{}.json".format(instance.name)
+        json_path = os.path.join(parent_dir, json_filename)
+
+        with open(json_path, "w+") as file:
+            json.dump(
+                renderSetup.instance().encode(None),
+                fp=file, indent=2, sort_keys=True)
+
+        if "representations" not in instance.data:
+            instance.data["representations"] = []
+
+        representation = {
+            'name': 'json',
+            'ext': 'json',
+            'files': json_filename,
+            "stagingDir": parent_dir,
+        }
+        instance.data["representations"].append(representation)
+
+        self.log.debug(
+            "Extracted instance '%s' to: %s" % (instance.name, json_path))
diff --git a/client/ayon_maya/plugins/publish/extract_rig.py b/client/ayon_maya/plugins/publish/extract_rig.py
new file mode 100644
index 00000000..58e4373d
--- /dev/null
+++ b/client/ayon_maya/plugins/publish/extract_rig.py
@@ -0,0 +1,68 @@
+# -*- coding: utf-8 -*-
+"""Extract rig as Maya Scene."""
+import os
+
+from maya import cmds
+
+from ayon_core.pipeline import publish
+from ayon_maya.api.lib import maintained_selection
+
+
+class ExtractRig(publish.Extractor):
+    """Extract rig as Maya Scene."""
+
+    label = "Extract Rig (Maya Scene)"
+    hosts = ["maya"]
+    families = ["rig"]
+    scene_type = "ma"
+
+    def process(self, instance):
+        """Plugin entry point."""
+        maya_settings = instance.context.data["project_settings"]["maya"]
+        ext_mapping = {
+            item["name"]: item["value"]
+            for item in maya_settings["ext_mapping"]
+        }
+        if ext_mapping:
+            self.log.debug("Looking in settings for scene type ...")
+            # use extension mapping for first family found
+            for family in self.families:
+                try:
+                    self.scene_type = ext_mapping[family]
+                    self.log.debug(
+                        "Using '.{}' as scene type".format(self.scene_type))
+                    break
+                except KeyError:
+                    # no preset found
+                    pass
+        # Define extract output file path
+        dir_path = self.staging_dir(instance)
+        filename = "{0}.{1}".format(instance.name, self.scene_type)
+        path = os.path.join(dir_path, filename)
+
+        # Perform extraction
+        self.log.debug("Performing extraction ...")
+        with maintained_selection():
+            cmds.select(instance, noExpand=True)
+            cmds.file(path,
+                      force=True,
+                      typ="mayaAscii" if self.scene_type == "ma" else "mayaBinary",  # noqa: E501
+                      exportSelected=True,
+                      preserveReferences=False,
+                      channels=True,
+                      constraints=True,
+                      expressions=True,
+                      constructionHistory=True)
+
+        if "representations" not in instance.data:
+            instance.data["representations"] = []
+
+        representation = {
+            'name': self.scene_type,
+            'ext': self.scene_type,
+            'files': filename,
+            "stagingDir": dir_path
+        }
+        instance.data["representations"].append(representation)
+
+        self.log.debug("Extracted instance '%s' to: %s", instance.name, path)
diff --git a/client/ayon_maya/plugins/publish/extract_skeleton_mesh.py b/client/ayon_maya/plugins/publish/extract_skeleton_mesh.py
new file mode 100644
index 00000000..76e49d15
--- /dev/null
+++ b/client/ayon_maya/plugins/publish/extract_skeleton_mesh.py
@@ -0,0 +1,54 @@
+# -*- coding: utf-8 -*-
+import os
+
+from maya import cmds  # noqa
+import pyblish.api
+
+from ayon_core.pipeline import publish
+from ayon_core.pipeline.publish import OptionalPyblishPluginMixin
+from ayon_maya.api import fbx
+
+
+class ExtractSkeletonMesh(publish.Extractor,
+                          OptionalPyblishPluginMixin):
+    """Extract Rig in FBX format from Maya.
+
+    This extracts the rig in fbx with the constraints
+    and referenced asset content included.
+    This also optionally extracts an animated rig in fbx with
+    geometries included.
+
+    """
+    order = pyblish.api.ExtractorOrder
+    label = "Extract Skeleton Mesh"
+    hosts = ["maya"]
+    families = ["rig.fbx"]
+
+    def process(self, instance):
+        if not self.is_active(instance.data):
+            return
+        # Define output path
+        staging_dir = self.staging_dir(instance)
+        filename = "{0}.fbx".format(instance.name)
+        path = os.path.join(staging_dir, filename)
+
+        fbx_exporter = fbx.FBXExtractor(log=self.log)
+        out_set = instance.data.get("skeleton_mesh", [])
+
+        instance.data["constraints"] = True
+        instance.data["skeletonDefinitions"] = True
+
+        fbx_exporter.set_options_from_instance(instance)
+
+        # Export
+        fbx_exporter.export(out_set, path)
+
+        representations = instance.data.setdefault("representations", [])
+        representations.append({
+            'name': 'fbx',
+            'ext': 'fbx',
+            'files': filename,
+            "stagingDir": staging_dir
+        })
+
+        self.log.debug("Extract FBX to: {0}".format(path))
diff --git a/client/ayon_maya/plugins/publish/extract_thumbnail.py b/client/ayon_maya/plugins/publish/extract_thumbnail.py
new file mode 100644
index 00000000..2863e446
--- /dev/null
+++ b/client/ayon_maya/plugins/publish/extract_thumbnail.py
@@ -0,0 +1,120 @@
+import os
+import glob
+import tempfile
+
+from ayon_core.pipeline import publish
+from ayon_maya.api import lib
+
+
+class ExtractThumbnail(publish.Extractor):
+    """Extract viewport thumbnail.
+
+    Takes review camera and creates a thumbnail based on viewport
+    capture.
+
+    """
+
+    label = "Thumbnail"
+    hosts = ["maya"]
+    families = ["review"]
+
+    def process(self, instance):
+        self.log.debug("Extracting thumbnail..")
+
+        camera = instance.data["review_camera"]
+
+        task_data = instance.data["anatomyData"].get("task", {})
+        capture_preset = lib.get_capture_preset(
+            task_data.get("name"),
+            task_data.get("type"),
+            instance.data["productName"],
+            instance.context.data["project_settings"],
+            self.log
+        )
+
+        # Create temp directory for thumbnail
+        # - this is to avoid "override" of source file
+        dst_staging = tempfile.mkdtemp(prefix="pyblish_tmp_thumbnail")
+        self.log.debug(
+            "Create temp directory {} for thumbnail".format(dst_staging)
+        )
+        # Store new staging to cleanup paths
+        filename = instance.name
+        path = os.path.join(dst_staging, filename)
+
+        self.log.debug("Outputting images to %s" % path)
+
+        preset = lib.generate_capture_preset(
+            instance, camera, path,
+            start=1, end=1,
+            capture_preset=capture_preset)
+
+        preset["camera_options"].update({
+            "displayGateMask": False,
+            "displayResolution": False,
+            "displayFilmGate": False,
+            "displayFieldChart": False,
+            "displaySafeAction": False,
+            "displaySafeTitle": False,
+            "displayFilmPivot": False,
+            "displayFilmOrigin": False,
+            "overscan": 1.0,
+        })
+        path = lib.render_capture_preset(preset)
+
+        playblast = self._fix_playblast_output_path(path)
+
+        _, thumbnail = os.path.split(playblast)
+
+        self.log.debug("file list {}".format(thumbnail))
+
+        if "representations" not in instance.data:
+            instance.data["representations"] = []
+
+        representation = {
+            "name": "thumbnail",
+            "ext": "jpg",
+            "files": thumbnail,
+            "stagingDir": dst_staging,
+            "thumbnail": True
+        }
+        instance.data["representations"].append(representation)
+
+    def _fix_playblast_output_path(self, filepath):
+        """Workaround a bug in maya.cmds.playblast to return correct filepath.
+
+        When the `viewer` argument is set to False and maya.cmds.playblast
+        does not automatically open the playblasted file the returned
+        filepath does not have the file's extension added correctly.
+
+        To work around this we just glob.glob() for any file extensions and
+        assume the latest modified file is the correct file and return it.
+
+        """
+        # Catch cancelled playblast
+        if filepath is None:
+            self.log.warning("Playblast did not result in output path. "
+                             "Playblast is probably interrupted.")
+            return None
+
+        # Fix: playblast not returning correct filename (with extension)
+        # Let's assume the most recently modified file is the correct one.
+        if not os.path.exists(filepath):
+            directory = os.path.dirname(filepath)
+            filename = os.path.basename(filepath)
+            # check if the filepath has a frame-based filename
+            # example : capture.####.png
+            parts = filename.split(".")
+            if len(parts) == 3:
+                query = os.path.join(directory, "{}.*.{}".format(parts[0],
+                                                                 parts[-1]))
+                files = glob.glob(query)
+            else:
+                files = glob.glob("{}.*".format(filepath))
+
+            if not files:
+                raise RuntimeError("Couldn't find playblast from: "
+                                   "{0}".format(filepath))
+            filepath = max(files, key=os.path.getmtime)
+
+        return filepath
diff --git a/client/ayon_maya/plugins/publish/extract_unreal_skeletalmesh_abc.py b/client/ayon_maya/plugins/publish/extract_unreal_skeletalmesh_abc.py
new file mode 100644
index 00000000..f0096e37
--- /dev/null
+++ b/client/ayon_maya/plugins/publish/extract_unreal_skeletalmesh_abc.py
@@ -0,0 +1,97 @@
+# -*- coding: utf-8 -*-
+"""Create Unreal Skeletal Mesh data to be extracted as Alembic."""
+import os
+
+from maya import cmds  # noqa
+
+from ayon_core.pipeline import publish
+from ayon_maya.api.alembic import extract_alembic
+from ayon_maya.api.lib import (
+    suspended_refresh,
+    maintained_selection
+)
+
+
+class ExtractUnrealSkeletalMeshAbc(publish.Extractor):
+    """Extract Unreal Skeletal Mesh as Alembic from Maya.
""" + + label = "Extract Unreal Skeletal Mesh - Alembic" + hosts = ["maya"] + families = ["skeletalMesh"] + optional = True + + def process(self, instance): + self.log.debug("Extracting pointcache..") + + geo = cmds.listRelatives( + instance.data.get("geometry"), allDescendents=True, fullPath=True) + joints = cmds.listRelatives( + instance.data.get("joints"), allDescendents=True, fullPath=True) + + nodes = geo + joints + + attrs = instance.data.get("attr", "").split(";") + attrs = [value for value in attrs if value.strip()] + attrs += ["cbId"] + + attr_prefixes = instance.data.get("attrPrefix", "").split(";") + attr_prefixes = [value for value in attr_prefixes if value.strip()] + + # Define output path + staging_dir = self.staging_dir(instance) + filename = "{0}.abc".format(instance.name) + path = os.path.join(staging_dir, filename) + + # The export requires forward slashes because we need + # to format it into a string in a mel expression + path = path.replace('\\', '/') + + self.log.debug("Extracting ABC to: {0}".format(path)) + self.log.debug("Members: {0}".format(nodes)) + self.log.debug("Instance: {0}".format(instance[:])) + + options = { + "step": instance.data.get("step", 1.0), + "attr": attrs, + "attrPrefix": attr_prefixes, + "writeVisibility": True, + "writeCreases": True, + "writeColorSets": instance.data.get("writeColorSets", False), + "writeFaceSets": instance.data.get("writeFaceSets", False), + "uvWrite": True, + "selection": True, + "worldSpace": instance.data.get("worldSpace", True) + } + + self.log.debug("Options: {}".format(options)) + + if int(cmds.about(version=True)) >= 2017: + # Since Maya 2017 alembic supports multiple uv sets - write them. + options["writeUVSets"] = True + + if not instance.data.get("includeParentHierarchy", True): + # Set the root nodes if we don't want to include parents + # The roots are to be considered the ones that are the actual + # direct members of the set + options["root"] = instance.data.get("setMembers") + + with suspended_refresh(suspend=instance.data.get("refresh", False)): + with maintained_selection(): + cmds.select(nodes, noExpand=True) + extract_alembic(file=path, + # startFrame=start, + # endFrame=end, + **options) + + if "representations" not in instance.data: + instance.data["representations"] = [] + + representation = { + 'name': 'abc', + 'ext': 'abc', + 'files': filename, + "stagingDir": staging_dir, + } + instance.data["representations"].append(representation) + + self.log.debug("Extract ABC successful to: {0}".format(path)) diff --git a/client/ayon_maya/plugins/publish/extract_unreal_skeletalmesh_fbx.py b/client/ayon_maya/plugins/publish/extract_unreal_skeletalmesh_fbx.py new file mode 100644 index 00000000..95e12795 --- /dev/null +++ b/client/ayon_maya/plugins/publish/extract_unreal_skeletalmesh_fbx.py @@ -0,0 +1,91 @@ +# -*- coding: utf-8 -*- +"""Create Unreal Skeletal Mesh data to be extracted as FBX.""" +import os +from contextlib import contextmanager + +from maya import cmds # noqa + +import pyblish.api + +from ayon_core.pipeline import publish +from ayon_maya.api import fbx + + +@contextmanager +def renamed(original_name, renamed_name): + # type: (str, str) -> None + try: + cmds.rename(original_name, renamed_name) + yield + finally: + cmds.rename(renamed_name, original_name) + + +class ExtractUnrealSkeletalMeshFbx(publish.Extractor): + """Extract Unreal Skeletal Mesh as FBX from Maya. 
""" + + order = pyblish.api.ExtractorOrder - 0.1 + label = "Extract Unreal Skeletal Mesh - FBX" + families = ["skeletalMesh"] + optional = True + + def process(self, instance): + fbx_exporter = fbx.FBXExtractor(log=self.log) + + # Define output path + staging_dir = self.staging_dir(instance) + filename = "{0}.fbx".format(instance.name) + path = os.path.join(staging_dir, filename) + + geo = instance.data.get("geometry") + joints = instance.data.get("joints") + + to_extract = geo + joints + + # The export requires forward slashes because we need + # to format it into a string in a mel expression + path = path.replace('\\', '/') + + self.log.debug("Extracting FBX to: {0}".format(path)) + self.log.debug("Members: {0}".format(to_extract)) + self.log.debug("Instance: {0}".format(instance[:])) + + fbx_exporter.set_options_from_instance(instance) + + # This magic is done for variants. To let Unreal merge correctly + # existing data, top node must have the same name. So for every + # variant we extract we need to rename top node of the rig correctly. + # It is finally done in context manager so it won't affect current + # scene. + + # we rely on hierarchy under one root. + original_parent = to_extract[0].split("|")[1] + + parent_node = instance.data.get("folderPath") + # this needs to be done for AYON + # WARNING: since AYON supports duplicity of asset names, + # this needs to be refactored throughout the pipeline. + parent_node = parent_node.split("/")[-1] + + renamed_to_extract = [] + for node in to_extract: + node_path = node.split("|") + node_path[1] = parent_node + renamed_to_extract.append("|".join(node_path)) + + with renamed(original_parent, parent_node): + self.log.debug("Extracting: {}".format(renamed_to_extract)) + fbx_exporter.export(renamed_to_extract, path) + + if "representations" not in instance.data: + instance.data["representations"] = [] + + representation = { + 'name': 'fbx', + 'ext': 'fbx', + 'files': filename, + "stagingDir": staging_dir, + } + instance.data["representations"].append(representation) + + self.log.debug("Extract FBX successful to: {0}".format(path)) diff --git a/client/ayon_maya/plugins/publish/extract_unreal_staticmesh.py b/client/ayon_maya/plugins/publish/extract_unreal_staticmesh.py new file mode 100644 index 00000000..140a4e41 --- /dev/null +++ b/client/ayon_maya/plugins/publish/extract_unreal_staticmesh.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +"""Create Unreal Static Mesh data to be extracted as FBX.""" +import os + +from maya import cmds # noqa + +import pyblish.api + +from ayon_core.pipeline import publish +from ayon_maya.api.lib import ( + parent_nodes, + maintained_selection +) +from ayon_maya.api import fbx + + +class ExtractUnrealStaticMesh(publish.Extractor): + """Extract Unreal Static Mesh as FBX from Maya. 
""" + + order = pyblish.api.ExtractorOrder - 0.1 + label = "Extract Unreal Static Mesh" + families = ["staticMesh"] + + def process(self, instance): + members = instance.data.get("geometryMembers", []) + if instance.data.get("collisionMembers"): + members = members + instance.data.get("collisionMembers") + + fbx_exporter = fbx.FBXExtractor(log=self.log) + + # Define output path + staging_dir = self.staging_dir(instance) + filename = "{0}.fbx".format(instance.name) + path = os.path.join(staging_dir, filename) + + # The export requires forward slashes because we need + # to format it into a string in a mel expression + path = path.replace('\\', '/') + + self.log.debug("Extracting FBX to: {0}".format(path)) + self.log.debug("Members: {0}".format(members)) + self.log.debug("Instance: {0}".format(instance[:])) + + fbx_exporter.set_options_from_instance(instance) + + with maintained_selection(): + with parent_nodes(members): + self.log.debug("Un-parenting: {}".format(members)) + fbx_exporter.export(members, path) + + if "representations" not in instance.data: + instance.data["representations"] = [] + + representation = { + 'name': 'fbx', + 'ext': 'fbx', + 'files': filename, + "stagingDir": staging_dir, + } + instance.data["representations"].append(representation) + + self.log.debug("Extract FBX successful to: {0}".format(path)) diff --git a/client/ayon_maya/plugins/publish/extract_unreal_yeticache.py b/client/ayon_maya/plugins/publish/extract_unreal_yeticache.py new file mode 100644 index 00000000..9a6b4eba --- /dev/null +++ b/client/ayon_maya/plugins/publish/extract_unreal_yeticache.py @@ -0,0 +1,61 @@ +import os + +from maya import cmds + +from ayon_core.pipeline import publish + + +class ExtractUnrealYetiCache(publish.Extractor): + """Producing Yeti cache files using scene time range. + + This will extract Yeti cache file sequence and fur settings. 
+ """ + + label = "Extract Yeti Cache (Unreal)" + hosts = ["maya"] + families = ["yeticacheUE"] + + def process(self, instance): + + yeti_nodes = cmds.ls(instance, type="pgYetiMaya") + if not yeti_nodes: + raise RuntimeError("No pgYetiMaya nodes found in the instance") + + # Define extract output file path + dirname = self.staging_dir(instance) + + # Collect information for writing cache + start_frame = instance.data["frameStartHandle"] + end_frame = instance.data["frameEndHandle"] + preroll = instance.data["preroll"] + if preroll > 0: + start_frame -= preroll + + kwargs = {} + samples = instance.data.get("samples", 0) + if samples == 0: + kwargs.update({"sampleTimes": "0.0 1.0"}) + else: + kwargs.update({"samples": samples}) + + self.log.debug(f"Writing out cache {start_frame} - {end_frame}") + filename = f"{instance.name}.abc" + path = os.path.join(dirname, filename) + cmds.pgYetiCommand(yeti_nodes, + writeAlembic=path, + range=(start_frame, end_frame), + asUnrealAbc=True, + **kwargs) + + if "representations" not in instance.data: + instance.data["representations"] = [] + + representation = { + 'name': 'abc', + 'ext': 'abc', + 'files': filename, + 'stagingDir': dirname + } + instance.data["representations"].append(representation) + + self.log.debug(f"Extracted {instance} to {dirname}") diff --git a/client/ayon_maya/plugins/publish/extract_vrayproxy.py b/client/ayon_maya/plugins/publish/extract_vrayproxy.py new file mode 100644 index 00000000..581195c5 --- /dev/null +++ b/client/ayon_maya/plugins/publish/extract_vrayproxy.py @@ -0,0 +1,72 @@ +import os + +from maya import cmds + +from ayon_core.pipeline import publish +from ayon_maya.api.lib import maintained_selection + + +class ExtractVRayProxy(publish.Extractor): + """Extract the content of the instance to a vrmesh file + + Things to pay attention to: + - If animation is toggled, are the frames correct + - + """ + + label = "VRay Proxy (.vrmesh)" + hosts = ["maya"] + families = ["vrayproxy.vrmesh"] + + def process(self, instance): + + staging_dir = self.staging_dir(instance) + file_name = "{}.vrmesh".format(instance.name) + file_path = os.path.join(staging_dir, file_name) + + anim_on = instance.data["animation"] + if not anim_on: + # Remove animation information because it is not required for + # non-animated products + keys = ["frameStart", "frameEnd", + "handleStart", "handleEnd", + "frameStartHandle", "frameEndHandle"] + for key in keys: + instance.data.pop(key, None) + + start_frame = 1 + end_frame = 1 + else: + start_frame = instance.data["frameStartHandle"] + end_frame = instance.data["frameEndHandle"] + + vertex_colors = instance.data.get("vertexColors", False) + + # Write out vrmesh file + self.log.debug("Writing: '%s'" % file_path) + with maintained_selection(): + cmds.select(instance.data["setMembers"], noExpand=True) + cmds.vrayCreateProxy(exportType=1, + dir=staging_dir, + fname=file_name, + animOn=anim_on, + animType=3, + startFrame=start_frame, + endFrame=end_frame, + vertexColorsOn=vertex_colors, + ignoreHiddenObjects=True, + createProxyNode=False) + + if "representations" not in instance.data: + instance.data["representations"] = [] + + representation = { + 'name': 'vrmesh', + 'ext': 'vrmesh', + 'files': file_name, + "stagingDir": staging_dir, + } + instance.data["representations"].append(representation) + + self.log.debug("Extracted instance '%s' to: %s" + % (instance.name, staging_dir)) diff --git a/client/ayon_maya/plugins/publish/extract_vrayscene.py b/client/ayon_maya/plugins/publish/extract_vrayscene.py new file 
mode 100644
index 00000000..d55c0dde
--- /dev/null
+++ b/client/ayon_maya/plugins/publish/extract_vrayscene.py
@@ -0,0 +1,139 @@
+# -*- coding: utf-8 -*-
+"""Extract vrayscene from specified families."""
+import os
+import re
+
+from ayon_core.pipeline import publish
+from ayon_maya.api.render_setup_tools import export_in_rs_layer
+from ayon_maya.api.lib import maintained_selection
+
+from maya import cmds
+
+
+class ExtractVrayscene(publish.Extractor):
+    """Extractor for vrscene."""
+
+    label = "VRay Scene (.vrscene)"
+    hosts = ["maya"]
+    families = ["vrayscene_layer"]
+
+    def process(self, instance):
+        """Plugin entry point."""
+        if instance.data.get("exportOnFarm"):
+            self.log.debug("vrayscenes will be exported on farm.")
+            raise NotImplementedError(
+                "exporting vrayscenes is not implemented")
+
+        # handle sequence
+        if instance.data.get("vraySceneMultipleFiles"):
+            self.log.debug("vrayscenes will be exported on farm.")
+            raise NotImplementedError(
+                "exporting vrayscene sequences not implemented yet")
+
+        vray_settings = cmds.ls(type="VRaySettingsNode")
+        if not vray_settings:
+            node = cmds.createNode("VRaySettingsNode")
+        else:
+            node = vray_settings[0]
+
+        # setMembers on vrayscene_layer should contain layer name.
+        layer_name = instance.data.get("layer")
+
+        staging_dir = self.staging_dir(instance)
+        template = cmds.getAttr("{}.vrscene_filename".format(node))
+        start_frame = instance.data.get(
+            "frameStartHandle") if instance.data.get(
+                "vraySceneMultipleFiles") else None
+        formatted_name = self.format_vray_output_filename(
+            os.path.basename(instance.data.get("source")),
+            layer_name,
+            template,
+            start_frame
+        )
+
+        file_path = os.path.join(
+            staging_dir, "vrayscene", *formatted_name.split("/"))
+
+        # Write out vrscene file
+        self.log.debug("Writing: '%s'" % file_path)
+        with maintained_selection():
+            if "*" not in instance.data["setMembers"]:
+                self.log.debug(
+                    "Exporting: {}".format(instance.data["setMembers"]))
+                set_members = instance.data["setMembers"]
+                cmds.select(set_members, noExpand=True)
+            else:
+                self.log.debug("Exporting all ...")
+                set_members = cmds.ls(
+                    long=True, objectsOnly=True,
+                    geometry=True, lights=True, cameras=True)
+                cmds.select(set_members, noExpand=True)
+
+            self.log.debug("Appending layer name {}".format(layer_name))
+            set_members.append(layer_name)
+
+            export_in_rs_layer(
+                file_path,
+                set_members,
+                export=lambda: cmds.file(
+                    file_path, type="V-Ray Scene",
+                    pr=True, es=True, force=True))
+
+        if "representations" not in instance.data:
+            instance.data["representations"] = []
+
+        files = file_path
+
+        representation = {
+            'name': 'vrscene',
+            'ext': 'vrscene',
+            'files': os.path.basename(files),
+            "stagingDir": os.path.dirname(files),
+        }
+        instance.data["representations"].append(representation)
+
+        self.log.debug("Extracted instance '%s' to: %s"
+                       % (instance.name, staging_dir))
+
+    @staticmethod
+    def format_vray_output_filename(
+            filename, layer, template, start_frame=None):
+        """Format the expected output file of the Export job.
+
+        Example:
+            filename: /mnt/projects/foo/shot010_v006.mb
+            template: <Scene>/<Layer>/<Layer>
+            result: "shot010_v006/CHARS/CHARS.vrscene"
+
+        Args:
+            filename (str): path to scene file.
+            layer (str): layer name.
+            template (str): token template.
+            start_frame (int, optional): start frame - if set we use
+                multiple files export mode.
+
+        Returns:
+            str: formatted path.
+
+        """
+        # format template to match pythons format specs
+        template = re.sub(r"<(\w+?)>", r"{\1}", template.lower())
+
+        # Ensure filename has no extension
+        file_name, _ = os.path.splitext(filename)
+        mapping = {
+            "scene": file_name,
+            "layer": layer
+        }
+
+        output_path = template.format(**mapping)
+
+        if start_frame:
+            filename_zero = "{}_{:04d}.vrscene".format(
+                output_path, start_frame)
+        else:
+            filename_zero = "{}.vrscene".format(output_path)
+
+        result = filename_zero.replace("\\", "/")
+
+        return result
diff --git a/client/ayon_maya/plugins/publish/extract_workfile_xgen.py b/client/ayon_maya/plugins/publish/extract_workfile_xgen.py
new file mode 100644
index 00000000..227c16d6
--- /dev/null
+++ b/client/ayon_maya/plugins/publish/extract_workfile_xgen.py
@@ -0,0 +1,251 @@
+import os
+import shutil
+import copy
+
+from maya import cmds
+
+import pyblish.api
+from ayon_maya.api.alembic import extract_alembic
+from ayon_core.pipeline import publish
+
+
+class ExtractWorkfileXgen(publish.Extractor):
+    """Extract Workfile Xgen.
+
+    When submitting a render, we need to prep Xgen side car files.
+    """
+
+    # Offset to run before workfile scene save.
+    order = pyblish.api.ExtractorOrder - 0.499
+    label = "Extract Workfile Xgen"
+    families = ["workfile"]
+    hosts = ["maya"]
+
+    def get_render_max_frame_range(self, context):
+        """Return start to end frame range including all renderlayers in
+        context.
+
+        This will return the full frame range which includes all frames of the
+        renderlayer instances to be published/submitted.
+
+        Args:
+            context (pyblish.api.Context): Current publishing context.
+
+        Returns:
+            tuple or None: Start frame, end frame tuple if any renderlayers
+                found. Otherwise None is returned.
+
+        """
+
+        def _is_active_renderlayer(i):
+            """Return whether instance is active renderlayer"""
+            if not i.data.get("publish", True):
+                return False
+
+            is_renderlayer = (
+                "renderlayer" in i.data.get("families", []) or
+                i.data["productType"] == "renderlayer"
+            )
+            return is_renderlayer
+
+        start_frame = None
+        end_frame = None
+        for instance in context:
+            if not _is_active_renderlayer(instance):
+                # Only consider renderlayer instances
+                continue
+
+            render_start_frame = instance.data["frameStart"]
+            render_end_frame = instance.data["frameEnd"]
+
+            if start_frame is None:
+                start_frame = render_start_frame
+            else:
+                start_frame = min(start_frame, render_start_frame)
+
+            if end_frame is None:
+                end_frame = render_end_frame
+            else:
+                end_frame = max(end_frame, render_end_frame)
+
+        if start_frame is None or end_frame is None:
+            return
+
+        return start_frame, end_frame
+
+    def process(self, instance):
+        transfers = []
+
+        # Validate there are any palettes in the scene.
+        if not cmds.ls(type="xgmPalette"):
+            self.log.debug(
+                "No collections found in the scene. Skipping Xgen extraction."
+            )
+            return
+
+        import xgenm
+
+        # Validate to extract only when we are publishing a renderlayer as
+        # well.
+        render_range = self.get_render_max_frame_range(instance.context)
+        if not render_range:
+            self.log.debug(
+                "No publishable renderlayers found in context. Skipping Xgen"
+                " extraction."
+            )
+            return
+
+        start_frame, end_frame = render_range
+
+        # We decrement start frame and increment end frame so motion blur will
+        # render correctly.
+        start_frame -= 1
+        end_frame += 1
+
+        # Extract patches alembic.
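+        # One Alembic file is written per palette; namespace separators are
+        # not wanted in file names, so ":" is flattened to "__ns__" below.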
+ path_no_ext, _ = os.path.splitext(instance.context.data["currentFile"]) + kwargs = {"attrPrefix": ["xgen"], "stripNamespaces": True} + alembic_files = [] + for palette in cmds.ls(type="xgmPalette"): + patch_names = [] + for description in xgenm.descriptions(palette): + for name in xgenm.boundGeometry(palette, description): + patch_names.append(name) + + alembic_file = "{}__{}.abc".format( + path_no_ext, palette.replace(":", "__ns__") + ) + extract_alembic( + alembic_file, + root=patch_names, + selection=False, + startFrame=float(start_frame), + endFrame=float(end_frame), + verbose=True, + **kwargs + ) + alembic_files.append(alembic_file) + + template_data = copy.deepcopy(instance.data["anatomyData"]) + anatomy = instance.context.data["anatomy"] + publish_template = anatomy.get_template_item( + "publish", "default", "file" + ) + published_maya_path = publish_template.format(template_data) + published_basename, _ = os.path.splitext(published_maya_path) + + for source in alembic_files: + destination = os.path.join( + os.path.dirname(instance.data["resourcesDir"]), + os.path.basename( + source.replace(path_no_ext, published_basename) + ) + ) + transfers.append((source, destination)) + + # Validate that we are using the published workfile. + deadline_settings = instance.context.get("deadline") + if deadline_settings: + publish_settings = deadline_settings["publish"] + if not publish_settings["MayaSubmitDeadline"]["use_published"]: + self.log.debug( + "Not using the published workfile. Abort Xgen extraction." + ) + return + + # Collect Xgen and Delta files. + xgen_files = [] + sources = [] + current_dir = os.path.dirname(instance.context.data["currentFile"]) + attrs = ["xgFileName", "xgBaseFile"] + for palette in cmds.ls(type="xgmPalette"): + for attr in attrs: + source = os.path.join( + current_dir, cmds.getAttr(palette + "." + attr) + ) + if not os.path.exists(source): + continue + + ext = os.path.splitext(source)[1] + if ext == ".xgen": + xgen_files.append(source) + if ext == ".xgd": + sources.append(source) + + # Copy .xgen file to temporary location and modify. + staging_dir = self.staging_dir(instance) + for source in xgen_files: + destination = os.path.join(staging_dir, os.path.basename(source)) + shutil.copy(source, destination) + + lines = [] + with open(destination, "r") as f: + for line in [line.rstrip() for line in f]: + if line.startswith("\txgProjectPath"): + path = os.path.dirname(instance.data["resourcesDir"]) + line = "\txgProjectPath\t\t{}/".format( + path.replace("\\", "/") + ) + + lines.append(line) + + with open(destination, "w") as f: + f.write("\n".join(lines)) + + sources.append(destination) + + # Add resource files to workfile instance. 
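+        # The modified .xgen copies and any .xgd delta files are staged next
+        # to the resources directory, matching the xgProjectPath that was
+        # rewritten into the copied .xgen files above.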
+ for source in sources: + basename = os.path.basename(source) + destination = os.path.join( + os.path.dirname(instance.data["resourcesDir"]), basename + ) + transfers.append((source, destination)) + + destination_dir = os.path.join( + instance.data["resourcesDir"], "collections" + ) + for palette in cmds.ls(type="xgmPalette"): + project_path = xgenm.getAttr("xgProjectPath", palette) + data_path = xgenm.getAttr("xgDataPath", palette) + data_path = data_path.replace("${PROJECT}", project_path) + for path in data_path.split(";"): + for root, _, files in os.walk(path): + for f in files: + source = os.path.join(root, f) + destination = "{}/{}{}".format( + destination_dir, + palette.replace(":", "__ns__"), + source.replace(path, "") + ) + transfers.append((source, destination)) + + for source, destination in transfers: + self.log.debug("Transfer: {} > {}".format(source, destination)) + + instance.data["transfers"] = transfers + + # Set palette attributes in preparation for workfile publish. + attrs = {"xgFileName": None, "xgBaseFile": ""} + data = {} + for palette in cmds.ls(type="xgmPalette"): + attrs["xgFileName"] = "resources/{}.xgen".format( + palette.replace(":", "__ns__") + ) + for attr, value in attrs.items(): + node_attr = palette + "." + attr + + old_value = cmds.getAttr(node_attr) + try: + data[palette][attr] = old_value + except KeyError: + data[palette] = {attr: old_value} + + cmds.setAttr(node_attr, value, type="string") + self.log.debug( + "Setting \"{}\" on \"{}\"".format(value, node_attr) + ) + + cmds.setAttr(palette + "." + "xgExportAsDelta", False) + + instance.data["xgenAttributes"] = data diff --git a/client/ayon_maya/plugins/publish/extract_xgen.py b/client/ayon_maya/plugins/publish/extract_xgen.py new file mode 100644 index 00000000..61cad1dc --- /dev/null +++ b/client/ayon_maya/plugins/publish/extract_xgen.py @@ -0,0 +1,153 @@ +import os +import copy +import tempfile + +from maya import cmds +import xgenm + +from ayon_core.pipeline import publish +from ayon_maya.api.lib import ( + maintained_selection, attribute_values, write_xgen_file, delete_after +) + + +class ExtractXgen(publish.Extractor): + """Extract Xgen + + Workflow: + - Duplicate nodes used for patches. + - Export palette and import onto duplicate nodes. + - Export/Publish duplicate nodes and palette. + - Export duplicate palette to .xgen file and add to publish. + - Publish all xgen files as resources. + """ + + label = "Extract Xgen" + hosts = ["maya"] + families = ["xgen"] + scene_type = "ma" + + def process(self, instance): + if "representations" not in instance.data: + instance.data["representations"] = [] + + staging_dir = self.staging_dir(instance) + maya_filename = "{}.{}".format(instance.data["name"], self.scene_type) + maya_filepath = os.path.join(staging_dir, maya_filename) + + # Get published xgen file name. + template_data = copy.deepcopy(instance.data["anatomyData"]) + template_data.update({"ext": "xgen"}) + anatomy = instance.context.data["anatomy"] + file_template = anatomy.get_template_item("publish", "default", "file") + xgen_filename = file_template.format(template_data) + + xgen_path = os.path.join( + self.staging_dir(instance), xgen_filename + ).replace("\\", "/") + type = "mayaAscii" if self.scene_type == "ma" else "mayaBinary" + + # Duplicate xgen setup. + with delete_after() as delete_bin: + duplicate_nodes = [] + # Collect nodes to export. + for node in instance.data["xgenConnections"]: + # Duplicate_transform subd patch geometry. 
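+                # cmds.duplicate returns the names of the newly created
+                # nodes; only the first entry (the transform) is kept, and
+                # it is added to the delete bin so the scene is restored
+                # after extraction.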
+ duplicate_transform = cmds.duplicate(node)[0] + delete_bin.append(duplicate_transform) + + # Discard the children. + shapes = cmds.listRelatives(duplicate_transform, shapes=True) + children = cmds.listRelatives( + duplicate_transform, children=True + ) + cmds.delete(set(children) - set(shapes)) + + if cmds.listRelatives(duplicate_transform, parent=True): + duplicate_transform = cmds.parent( + duplicate_transform, world=True + )[0] + + duplicate_nodes.append(duplicate_transform) + + # Export temp xgen palette files. + temp_xgen_path = os.path.join( + tempfile.gettempdir(), "temp.xgen" + ).replace("\\", "/") + xgenm.exportPalette( + instance.data["xgmPalette"].replace("|", ""), temp_xgen_path + ) + self.log.debug("Extracted to {}".format(temp_xgen_path)) + + # Import xgen onto the duplicate. + with maintained_selection(): + cmds.select(duplicate_nodes) + palette = xgenm.importPalette(temp_xgen_path, []) + + delete_bin.append(palette) + + # Copy shading assignments. + nodes = ( + instance.data["xgmDescriptions"] + + instance.data["xgmSubdPatches"] + ) + for node in nodes: + target_node = node.split(":")[-1] + shading_engine = cmds.listConnections( + node, type="shadingEngine" + )[0] + cmds.sets(target_node, edit=True, forceElement=shading_engine) + + # Export duplicated palettes. + xgenm.exportPalette(palette, xgen_path) + + # Export Maya file. + attribute_data = {"{}.xgFileName".format(palette): xgen_filename} + with attribute_values(attribute_data): + with maintained_selection(): + cmds.select(duplicate_nodes + [palette]) + cmds.file( + maya_filepath, + force=True, + type=type, + exportSelected=True, + preserveReferences=False, + constructionHistory=True, + shader=True, + constraints=True, + expressions=True + ) + + self.log.debug("Extracted to {}".format(maya_filepath)) + + if os.path.exists(temp_xgen_path): + os.remove(temp_xgen_path) + + data = { + "xgDataPath": os.path.join( + instance.data["resourcesDir"], + "collections", + palette.replace(":", "__ns__") + ).replace("\\", "/"), + "xgProjectPath": os.path.dirname( + instance.data["resourcesDir"] + ).replace("\\", "/") + } + write_xgen_file(data, xgen_path) + + # Adding representations. + representation = { + "name": "xgen", + "ext": "xgen", + "files": xgen_filename, + "stagingDir": staging_dir, + } + instance.data["representations"].append(representation) + + representation = { + "name": self.scene_type, + "ext": self.scene_type, + "files": maya_filename, + "stagingDir": staging_dir + } + instance.data["representations"].append(representation) diff --git a/client/ayon_maya/plugins/publish/extract_yeti_cache.py b/client/ayon_maya/plugins/publish/extract_yeti_cache.py new file mode 100644 index 00000000..b9cd7a1b --- /dev/null +++ b/client/ayon_maya/plugins/publish/extract_yeti_cache.py @@ -0,0 +1,90 @@ +import os +import json + +from maya import cmds + +from ayon_core.pipeline import publish + + +class ExtractYetiCache(publish.Extractor): + """Producing Yeti cache files using scene time range. + + This will extract Yeti cache file sequence and fur settings. 
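+
+    Cache files are written with an empty basename and a `%04d` frame
+    pattern (".%04d.fur"); Yeti substitutes each node's name into the
+    empty basename (per the comment in process()), which is why the
+    staging directory is scanned for "*.fur" files afterwards.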
+ """ + + label = "Extract Yeti Cache" + hosts = ["maya"] + families = ["yetiRig", "yeticache"] + + def process(self, instance): + + yeti_nodes = cmds.ls(instance, type="pgYetiMaya") + if not yeti_nodes: + raise RuntimeError("No pgYetiMaya nodes found in the instance") + + # Define extract output file path + dirname = self.staging_dir(instance) + + # Collect information for writing cache + start_frame = instance.data["frameStartHandle"] + end_frame = instance.data["frameEndHandle"] + preroll = instance.data["preroll"] + if preroll > 0: + start_frame -= preroll + + kwargs = {} + samples = instance.data.get("samples", 0) + if samples == 0: + kwargs.update({"sampleTimes": "0.0 1.0"}) + else: + kwargs.update({"samples": samples}) + + self.log.debug( + "Writing out cache {} - {}".format(start_frame, end_frame)) + # Start writing the files for snap shot + # will be replace by the Yeti node name + path = os.path.join(dirname, ".%04d.fur") + cmds.pgYetiCommand(yeti_nodes, + writeCache=path, + range=(start_frame, end_frame), + updateViewport=False, + generatePreview=False, + **kwargs) + + cache_files = [x for x in os.listdir(dirname) if x.endswith(".fur")] + + self.log.debug("Writing metadata file") + settings = instance.data["fursettings"] + fursettings_path = os.path.join(dirname, "yeti.fursettings") + with open(fursettings_path, "w") as fp: + json.dump(settings, fp, ensure_ascii=False) + + # build representations + if "representations" not in instance.data: + instance.data["representations"] = [] + + self.log.debug("cache files: {}".format(cache_files[0])) + + # Workaround: We do not explicitly register these files with the + # representation solely so that we can write multiple sequences + # a single Subset without renaming - it's a bit of a hack + # TODO: Implement better way to manage this sort of integration + if 'transfers' not in instance.data: + instance.data['transfers'] = [] + + publish_dir = instance.data["publishDir"] + for cache_filename in cache_files: + src = os.path.join(dirname, cache_filename) + dst = os.path.join(publish_dir, os.path.basename(cache_filename)) + instance.data['transfers'].append([src, dst]) + + instance.data["representations"].append( + { + 'name': 'fur', + 'ext': 'fursettings', + 'files': os.path.basename(fursettings_path), + 'stagingDir': dirname + } + ) + + self.log.debug("Extracted {} to {}".format(instance, dirname)) diff --git a/client/ayon_maya/plugins/publish/extract_yeti_rig.py b/client/ayon_maya/plugins/publish/extract_yeti_rig.py new file mode 100644 index 00000000..8ef43fbf --- /dev/null +++ b/client/ayon_maya/plugins/publish/extract_yeti_rig.py @@ -0,0 +1,208 @@ +# -*- coding: utf-8 -*- +"""Extract Yeti rig.""" + +import os +import json +import contextlib + +from maya import cmds + +from ayon_core.pipeline import publish +from ayon_maya.api import lib + + +@contextlib.contextmanager +def disconnect_plugs(settings, members): + """Disconnect and store attribute connections.""" + members = cmds.ls(members, long=True) + original_connections = [] + try: + for input in settings["inputs"]: + + # Get source shapes + source_nodes = lib.lsattr("cbId", input["sourceID"]) + if not source_nodes: + continue + + source = next(s for s in source_nodes if s not in members) + + # Get destination shapes (the shapes used as hook up) + destination_nodes = lib.lsattr("cbId", input["destinationID"]) + destination = next(i for i in destination_nodes if i in members) + + # Create full connection + connections = input["connections"] + src_attribute = "%s.%s" % (source, 
connections[0]) + dst_attribute = "%s.%s" % (destination, connections[1]) + + # Check if there is an actual connection + if not cmds.isConnected(src_attribute, dst_attribute): + print("No connection between %s and %s" % ( + src_attribute, dst_attribute)) + continue + + # Break and store connection + cmds.disconnectAttr(src_attribute, dst_attribute) + original_connections.append([src_attribute, dst_attribute]) + yield + finally: + # Restore previous connections + for connection in original_connections: + try: + cmds.connectAttr(connection[0], connection[1]) + except Exception as e: + print(e) + continue + + +@contextlib.contextmanager +def yetigraph_attribute_values(assumed_destination, resources): + """Get values from Yeti attributes in graph.""" + try: + for resource in resources: + if "graphnode" not in resource: + continue + + fname = os.path.basename(resource["source"]) + new_fpath = os.path.join(assumed_destination, fname) + new_fpath = new_fpath.replace("\\", "/") + + try: + cmds.pgYetiGraph(resource["node"], + node=resource["graphnode"], + param=resource["param"], + setParamValueString=new_fpath) + except Exception as exc: + print(">>> Exception:", exc) + yield + + finally: + for resource in resources: + if "graphnode" not in resources: + continue + + try: + cmds.pgYetiGraph(resource["node"], + node=resource["graphnode"], + param=resource["param"], + setParamValue=resource["source"]) + except RuntimeError: + pass + + +class ExtractYetiRig(publish.Extractor): + """Extract the Yeti rig to a Maya Scene and write the Yeti rig data.""" + + label = "Extract Yeti Rig" + hosts = ["maya"] + families = ["yetiRig"] + scene_type = "ma" + + def process(self, instance): + """Plugin entry point.""" + maya_settings = instance.context.data["project_settings"]["maya"] + ext_mapping = { + item["name"]: item["value"] + for item in maya_settings["ext_mapping"] + } + if ext_mapping: + self.log.debug("Looking in settings for scene type ...") + # use extension mapping for first family found + for family in self.families: + try: + self.scene_type = ext_mapping[family] + self.log.debug( + "Using {} as scene type".format(self.scene_type)) + break + except KeyError: + # no preset found + pass + yeti_nodes = cmds.ls(instance, type="pgYetiMaya") + if not yeti_nodes: + raise RuntimeError("No pgYetiMaya nodes found in the instance") + + # Define extract output file path + dirname = self.staging_dir(instance) + settings_path = os.path.join(dirname, "yeti.rigsettings") + + # Yeti related staging dirs + maya_path = os.path.join(dirname, + "yeti_rig.{}".format(self.scene_type)) + + self.log.debug("Writing metadata file: {}".format(settings_path)) + + image_search_path = resources_dir = instance.data["resourcesDir"] + + settings = instance.data.get("rigsettings", None) + assert settings, "Yeti rig settings were not collected." + settings["imageSearchPath"] = image_search_path + with open(settings_path, "w") as fp: + json.dump(settings, fp, ensure_ascii=False) + + # add textures to transfers + if 'transfers' not in instance.data: + instance.data['transfers'] = [] + + for resource in instance.data.get('resources', []): + for file in resource['files']: + src = file + dst = os.path.join(image_search_path, os.path.basename(file)) + instance.data['transfers'].append([src, dst]) + + self.log.debug("adding transfer {} -> {}". 
format(src, dst)) + + # Ensure the imageSearchPath is being remapped to the publish folder + attr_value = {"%s.imageSearchPath" % n: str(image_search_path) for + n in yeti_nodes} + + # Get input_SET members + input_set = next(i for i in instance if i == "input_SET") + + # Get all items + set_members = cmds.sets(input_set, query=True) or [] + set_members += cmds.listRelatives(set_members, + allDescendents=True, + fullPath=True) or [] + members = cmds.ls(set_members, long=True) + + nodes = instance.data["setMembers"] + resources = instance.data.get("resources", {}) + with disconnect_plugs(settings, members): + with yetigraph_attribute_values(resources_dir, resources): + with lib.attribute_values(attr_value): + cmds.select(nodes, noExpand=True) + cmds.file(maya_path, + force=True, + exportSelected=True, + typ="mayaAscii" if self.scene_type == "ma" else "mayaBinary", # noqa: E501 + preserveReferences=False, + constructionHistory=True, + shader=False) + + # Ensure files can be stored + # build representations + if "representations" not in instance.data: + instance.data["representations"] = [] + + self.log.debug("rig file: {}".format(maya_path)) + instance.data["representations"].append( + { + 'name': self.scene_type, + 'ext': self.scene_type, + 'files': os.path.basename(maya_path), + 'stagingDir': dirname + } + ) + self.log.debug("settings file: {}".format(settings_path)) + instance.data["representations"].append( + { + 'name': 'rigsettings', + 'ext': 'rigsettings', + 'files': os.path.basename(settings_path), + 'stagingDir': dirname + } + ) + + self.log.debug("Extracted {} to {}".format(instance, dirname)) + + cmds.select(clear=True) diff --git a/client/ayon_maya/plugins/publish/help/submit_maya_remote_publish_deadline.xml b/client/ayon_maya/plugins/publish/help/submit_maya_remote_publish_deadline.xml new file mode 100644 index 00000000..fa908fe4 --- /dev/null +++ b/client/ayon_maya/plugins/publish/help/submit_maya_remote_publish_deadline.xml @@ -0,0 +1,16 @@ + + + +Errors found + +## Publish process has errors + +At least one plugin failed before this plugin, job won't be sent to Deadline for processing before all issues are fixed. + +### How to repair? + +Check all failing plugins (should be highlighted in red) and fix issues if possible. + + + + diff --git a/client/ayon_maya/plugins/publish/help/validate_animation_out_set_related_node_ids.xml b/client/ayon_maya/plugins/publish/help/validate_animation_out_set_related_node_ids.xml new file mode 100644 index 00000000..cdaf97b8 --- /dev/null +++ b/client/ayon_maya/plugins/publish/help/validate_animation_out_set_related_node_ids.xml @@ -0,0 +1,29 @@ + + + +Shape IDs mismatch original shape +## Shapes mismatch IDs with original shape + +Meshes are detected where the (deformed) mesh has a different `cbId` than +the same mesh in its deformation history. +These should normally be the same. + +### How to repair? + +By using the repair action the IDs from the shape in history will be +copied to the deformed shape. For **animation** instances using the +repair action is usually the correct fix. + + + +### How does this happen? + +When a deformer is applied in the scene on a referenced mesh that had no +deformers then Maya will create a new shape node for the mesh that +does not have the original id. Then on scene save new ids get created for the +meshes lacking a `cbId` and thus the mesh then has a different `cbId` than +the mesh in the deformation history. 
+ + + + diff --git a/client/ayon_maya/plugins/publish/help/validate_maya_units.xml b/client/ayon_maya/plugins/publish/help/validate_maya_units.xml new file mode 100644 index 00000000..40169b28 --- /dev/null +++ b/client/ayon_maya/plugins/publish/help/validate_maya_units.xml @@ -0,0 +1,21 @@ + + + +Maya scene units +## Invalid maya scene units + +Detected invalid maya scene units: + +{issues} + + + +### How to repair? + +You can automatically repair the scene units by clicking the Repair action on +the right. + +After that restart publishing with Reload button. + + + diff --git a/client/ayon_maya/plugins/publish/help/validate_mesh_non_manifold.xml b/client/ayon_maya/plugins/publish/help/validate_mesh_non_manifold.xml new file mode 100644 index 00000000..5aec3009 --- /dev/null +++ b/client/ayon_maya/plugins/publish/help/validate_mesh_non_manifold.xml @@ -0,0 +1,33 @@ + + + +Non-Manifold Edges/Vertices +## Non-Manifold Edges/Vertices + +Meshes found with non-manifold edges or vertices. + +### How to repair? + +Run select invalid to select the invalid components. + +You can also try the _cleanup matching polygons_ action which will perform a +cleanup like Maya's `Mesh > Cleanup...` modeling tool. + +It is recommended to always select the invalid to see where the issue is +because if you run any repair on it you will need to double check the topology +is still like you wanted. + + + +### What is non-manifold topology? + +_Non-manifold topology_ polygons have a configuration that cannot be unfolded +into a continuous flat piece, for example: + +- Three or more faces share an edge +- Two or more faces share a single vertex but no edge. +- Adjacent faces have opposite normals + + + + diff --git a/client/ayon_maya/plugins/publish/help/validate_node_ids.xml b/client/ayon_maya/plugins/publish/help/validate_node_ids.xml new file mode 100644 index 00000000..2ef4bc95 --- /dev/null +++ b/client/ayon_maya/plugins/publish/help/validate_node_ids.xml @@ -0,0 +1,29 @@ + + + +Missing node ids +## Nodes found with missing `cbId` + +Nodes were detected in your scene which are missing required `cbId` +attributes for identification further in the pipeline. + +### How to repair? + +The node ids are auto-generated on scene save, and thus the easiest fix is to +save your scene again. + +After that restart publishing with Reload button. + + +### Invalid nodes + +{nodes} + + +### How could this happen? + +This often happens if you've generated new nodes but haven't saved your scene +after creating the new nodes. + + + diff --git a/client/ayon_maya/plugins/publish/help/validate_rig_out_set_node_ids.xml b/client/ayon_maya/plugins/publish/help/validate_rig_out_set_node_ids.xml new file mode 100644 index 00000000..956a7adb --- /dev/null +++ b/client/ayon_maya/plugins/publish/help/validate_rig_out_set_node_ids.xml @@ -0,0 +1,32 @@ + + + +Shape IDs mismatch original shape +## Shapes mismatch IDs with original shape + +Meshes are detected in the **rig** where the (deformed) mesh has a different +`cbId` than the same mesh in its deformation history. +These should normally be the same. + +### How to repair? + +By using the repair action the IDs from the shape in history will be +copied to the deformed shape. For rig instances, in many cases the +correct fix is to use the repair action **unless** you explicitly tried +to update the `cbId` values on the meshes - in that case you actually want +to do to the reverse and copy the IDs from the deformed mesh to the history +mesh instead. + + + +### How does this happen? 
+ +When a deformer is applied in the scene on a referenced mesh that had no +deformers then Maya will create a new shape node for the mesh that +does not have the original id. Then on scene save new ids get created for the +meshes lacking a `cbId` and thus the mesh then has a different `cbId` than +the mesh in the deformation history. + + + + diff --git a/client/ayon_maya/plugins/publish/help/validate_skeletalmesh_hierarchy.xml b/client/ayon_maya/plugins/publish/help/validate_skeletalmesh_hierarchy.xml new file mode 100644 index 00000000..d30c4cb6 --- /dev/null +++ b/client/ayon_maya/plugins/publish/help/validate_skeletalmesh_hierarchy.xml @@ -0,0 +1,14 @@ + + + +Skeletal Mesh Top Node +## Skeletal meshes needs common root + +Skeletal meshes and their joints must be under one common root. + +### How to repair? + +Make sure all geometry and joints resides under same root. + + + diff --git a/client/ayon_maya/plugins/publish/increment_current_file_deadline.py b/client/ayon_maya/plugins/publish/increment_current_file_deadline.py new file mode 100644 index 00000000..a9378df8 --- /dev/null +++ b/client/ayon_maya/plugins/publish/increment_current_file_deadline.py @@ -0,0 +1,39 @@ +import pyblish.api + + +class IncrementCurrentFileDeadline(pyblish.api.ContextPlugin): + """Increment the current file. + + Saves the current maya scene with an increased version number. + + """ + + label = "Increment current file" + order = pyblish.api.IntegratorOrder + 9.0 + hosts = ["maya"] + families = ["workfile"] + optional = True + + def process(self, context): + + from maya import cmds + from ayon_core.lib import version_up + from ayon_core.pipeline.publish import get_errored_plugins_from_context + + errored_plugins = get_errored_plugins_from_context(context) + if any(plugin.__name__ == "MayaSubmitDeadline" + for plugin in errored_plugins): + raise RuntimeError("Skipping incrementing current file because " + "submission to deadline failed.") + + current_filepath = context.data["currentFile"] + new_filepath = version_up(current_filepath) + + # # Ensure the suffix is .ma because we're saving to `mayaAscii` type + if new_filepath.endswith(".ma"): + fileType = "mayaAscii" + elif new_filepath.endswith(".mb"): + fileType = "mayaBinary" + + cmds.file(rename=new_filepath) + cmds.file(save=True, force=True, type=fileType) diff --git a/client/ayon_maya/plugins/publish/reset_xgen_attributes.py b/client/ayon_maya/plugins/publish/reset_xgen_attributes.py new file mode 100644 index 00000000..759aa232 --- /dev/null +++ b/client/ayon_maya/plugins/publish/reset_xgen_attributes.py @@ -0,0 +1,36 @@ +from maya import cmds + +import pyblish.api + + +class ResetXgenAttributes(pyblish.api.InstancePlugin): + """Reset Xgen attributes. + + When the incremental save of the workfile triggers, the Xgen attributes + changes so this plugin will change it back to the values before publishing. + """ + + label = "Reset Xgen Attributes." + # Offset to run after workfile increment plugin. 
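+    # Pyblish executes plugins in ascending 'order'; with the values in
+    # this changeset, for example:
+    #
+    #     IncrementCurrentFileDeadline.order  # IntegratorOrder + 9.0
+    #     ResetXgenAttributes.order           # IntegratorOrder + 10.0
+    #
+    # so the attributes are only restored after the new workfile version
+    # has been saved.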
+ order = pyblish.api.IntegratorOrder + 10.0 + families = ["workfile"] + + def process(self, instance): + xgen_attributes = instance.data.get("xgenAttributes", {}) + if not xgen_attributes: + return + + for palette, data in xgen_attributes.items(): + for attr, value in data.items(): + node_attr = "{}.{}".format(palette, attr) + self.log.debug( + "Setting \"{}\" on \"{}\"".format(value, node_attr) + ) + cmds.setAttr(node_attr, value, type="string") + cmds.setAttr(palette + ".xgExportAsDelta", True) + + # Need to save the scene, cause the attribute changes above does not + # mark the scene as modified so user can exit without committing the + # changes. + self.log.debug("Saving changes.") + cmds.file(save=True) diff --git a/client/ayon_maya/plugins/publish/save_scene.py b/client/ayon_maya/plugins/publish/save_scene.py new file mode 100644 index 00000000..eb7c06a1 --- /dev/null +++ b/client/ayon_maya/plugins/publish/save_scene.py @@ -0,0 +1,35 @@ +import pyblish.api +from ayon_core.pipeline.workfile.lock_workfile import ( + is_workfile_lock_enabled, + remove_workfile_lock +) + + +class SaveCurrentScene(pyblish.api.ContextPlugin): + """Save current scene + + """ + + label = "Save current file" + order = pyblish.api.ExtractorOrder - 0.49 + hosts = ["maya"] + families = ["renderlayer", "workfile"] + + def process(self, context): + import maya.cmds as cmds + + current = cmds.file(query=True, sceneName=True) + assert context.data['currentFile'] == current + + # If file has no modifications, skip forcing a file save + if not cmds.file(query=True, modified=True): + self.log.debug("Skipping file save as there " + "are no modifications..") + return + project_name = context.data["projectName"] + project_settings = context.data["project_settings"] + # remove lockfile before saving + if is_workfile_lock_enabled("maya", project_name, project_settings): + remove_workfile_lock(current) + self.log.info("Saving current file: {}".format(current)) + cmds.file(save=True, force=True) diff --git a/client/ayon_maya/plugins/publish/validate_alembic_options_defaults.py b/client/ayon_maya/plugins/publish/validate_alembic_options_defaults.py new file mode 100644 index 00000000..bd69e7a3 --- /dev/null +++ b/client/ayon_maya/plugins/publish/validate_alembic_options_defaults.py @@ -0,0 +1,130 @@ +import inspect +import pyblish.api + +from ayon_core.pipeline import OptionalPyblishPluginMixin +from ayon_core.pipeline.publish import RepairAction, PublishValidationError + + +class ValidateAlembicDefaultsPointcache( + pyblish.api.InstancePlugin, OptionalPyblishPluginMixin +): + """Validate the attributes on the instance are defaults. + + The defaults are defined in the project settings. 
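+
+    For example (illustrative values only): if the settings define
+    dataFormat as "ogawa" while the instance was created with "hdf", the
+    raised error lists "- dataFormat: hdf (default: ogawa)" and the
+    repair action writes the default back onto the create instance.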
+ """ + + order = pyblish.api.ValidatorOrder + families = ["pointcache"] + hosts = ["maya"] + label = "Validate Alembic Options Defaults" + actions = [RepairAction] + optional = True + + plugin_name = "ExtractAlembic" + + @classmethod + def _get_settings(cls, context): + maya_settings = context.data["project_settings"]["maya"] + settings = maya_settings["publish"]["ExtractAlembic"] + return settings + + @classmethod + def _get_publish_attributes(cls, instance): + return instance.data["publish_attributes"][cls.plugin_name] + + def process(self, instance): + if not self.is_active(instance.data): + return + + settings = self._get_settings(instance.context) + attributes = self._get_publish_attributes(instance) + + invalid = {} + for key, value in attributes.items(): + if key not in settings: + # This may occur if attributes have changed over time and an + # existing instance has older legacy attributes that do not + # match the current settings definition. + self.log.warning( + "Publish attribute %s not found in Alembic Export " + "default settings. Ignoring validation for attribute.", + key + ) + continue + + default_value = settings[key] + + # Lists are best to compared sorted since we can't rely on + # the order of the items. + if isinstance(value, list): + value = sorted(value) + default_value = sorted(default_value) + + if value != default_value: + invalid[key] = value, default_value + + if invalid: + non_defaults = "\n".join( + f"- {key}: {value} \t(default: {default_value})" + for key, (value, default_value) in invalid.items() + ) + + raise PublishValidationError( + "Alembic extract options differ from default values:\n" + f"{non_defaults}", + description=self.get_description() + ) + + @staticmethod + def get_description(): + return inspect.cleandoc( + """### Alembic Extract settings differ from defaults + + The alembic export options differ from the project default values. + + If this is intentional you can disable this validation by + disabling **Validate Alembic Options Default**. + + If not you may use the "Repair" action to revert all the options to + their default values. + + """ + ) + + @classmethod + def repair(cls, instance): + # Find create instance twin. + create_context = instance.context.data["create_context"] + create_instance = create_context.get_instance_by_id( + instance.data["instance_id"] + ) + + # Set the settings values on the create context then save to workfile. + settings = cls._get_settings(instance.context) + attributes = cls._get_publish_attributes(create_instance) + for key in attributes: + if key not in settings: + # This may occur if attributes have changed over time and an + # existing instance has older legacy attributes that do not + # match the current settings definition. + cls.log.warning( + "Publish attribute %s not found in Alembic Export " + "default settings. Ignoring repair for attribute.", + key + ) + continue + attributes[key] = settings[key] + + create_context.save_changes() + + +class ValidateAlembicDefaultsAnimation( + ValidateAlembicDefaultsPointcache +): + """Validate the attributes on the instance are defaults. + + The defaults are defined in the project settings. 
+ """ + label = "Validate Alembic Options Defaults" + families = ["animation"] + plugin_name = "ExtractAnimation" diff --git a/client/ayon_maya/plugins/publish/validate_animation_content.py b/client/ayon_maya/plugins/publish/validate_animation_content.py new file mode 100644 index 00000000..17b9bf4e --- /dev/null +++ b/client/ayon_maya/plugins/publish/validate_animation_content.py @@ -0,0 +1,59 @@ +import pyblish.api +import ayon_maya.api.action +from ayon_core.pipeline.publish import ( + PublishValidationError, + ValidateContentsOrder, + OptionalPyblishPluginMixin +) + + +class ValidateAnimationContent(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): + """Adheres to the content of 'animation' product type + + - Must have collected `out_hierarchy` data. + - All nodes in `out_hierarchy` must be in the instance. + + """ + + order = ValidateContentsOrder + hosts = ["maya"] + families = ["animation"] + label = "Animation Content" + actions = [ayon_maya.api.action.SelectInvalidAction] + optional = False + + @classmethod + def get_invalid(cls, instance): + + out_set = next((i for i in instance.data["setMembers"] if + i.endswith("out_SET")), None) + + assert out_set, ("Instance '%s' has no objectSet named: `OUT_set`. " + "If this instance is an unloaded reference, " + "please deactivate by toggling the 'Active' attribute" + % instance.name) + + assert 'out_hierarchy' in instance.data, "Missing `out_hierarchy` data" + + out_sets = [node for node in instance if node.endswith("out_SET")] + msg = "Couldn't find exactly one out_SET: {0}".format(out_sets) + assert len(out_sets) == 1, msg + + # All nodes in the `out_hierarchy` must be among the nodes that are + # in the instance. The nodes in the instance are found from the top + # group, as such this tests whether all nodes are under that top group. + + lookup = set(instance[:]) + invalid = [node for node in instance.data['out_hierarchy'] if + node not in lookup] + + return invalid + + def process(self, instance): + if not self.is_active(instance.data): + return + invalid = self.get_invalid(instance) + if invalid: + raise PublishValidationError( + "Animation content is invalid. See log.") diff --git a/client/ayon_maya/plugins/publish/validate_animation_out_set_related_node_ids.py b/client/ayon_maya/plugins/publish/validate_animation_out_set_related_node_ids.py new file mode 100644 index 00000000..6d53608f --- /dev/null +++ b/client/ayon_maya/plugins/publish/validate_animation_out_set_related_node_ids.py @@ -0,0 +1,109 @@ +import maya.cmds as cmds + +import pyblish.api +import ayon_maya.api.action +from ayon_maya.api import lib +from ayon_core.pipeline.publish import ( + RepairAction, + ValidateContentsOrder, + PublishXmlValidationError, + OptionalPyblishPluginMixin, + get_plugin_settings, + apply_plugin_settings_automatically +) + + +class ValidateOutRelatedNodeIds(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): + """Validate if deformed shapes have related IDs to the original shapes + + When a deformer is applied in the scene on a referenced mesh that already + had deformers then Maya will create a new shape node for the mesh that + does not have the original id. This validator checks whether the ids are + valid on all the shape nodes in the instance. 
+ + """ + + order = ValidateContentsOrder + families = ['animation', "pointcache", "proxyAbc"] + hosts = ['maya'] + label = 'Animation Out Set Related Node Ids' + actions = [ + ayon_maya.api.action.SelectInvalidAction, + RepairAction + ] + optional = False + + @classmethod + def apply_settings(cls, project_settings): + # Preserve automatic settings applying logic + settings = get_plugin_settings(plugin=cls, + project_settings=project_settings, + log=cls.log, + category="maya") + apply_plugin_settings_automatically(cls, settings, logger=cls.log) + + # Disable plug-in if cbId workflow is disabled + if not project_settings["maya"].get("use_cbid_workflow", True): + cls.enabled = False + return + + def process(self, instance): + """Process all meshes""" + if not self.is_active(instance.data): + return + # Ensure all nodes have a cbId and a related ID to the original shapes + # if a deformer has been created on the shape + invalid = self.get_invalid(instance) + if invalid: + + # Use the short names + invalid = cmds.ls(invalid) + invalid.sort() + + # Construct a human-readable list + invalid = "\n".join("- {}".format(node) for node in invalid) + + raise PublishXmlValidationError( + plugin=self, + message=( + "Nodes have different IDs than their input " + "history: \n{0}".format(invalid) + ) + ) + + @classmethod + def get_invalid(cls, instance): + """Get all nodes which do not match the criteria""" + + invalid = [] + types = ["mesh", "nurbsCurve", "nurbsSurface"] + + # get asset id + nodes = instance.data.get("out_hierarchy", instance[:]) + for node in cmds.ls(nodes, type=types, long=True): + + # We only check when the node is *not* referenced + if cmds.referenceQuery(node, isNodeReferenced=True): + continue + + # Get the current id of the node + node_id = lib.get_id(node) + + history_id = lib.get_id_from_sibling(node) + if history_id is not None and node_id != history_id: + invalid.append(node) + + return invalid + + @classmethod + def repair(cls, instance): + + for node in cls.get_invalid(instance): + # Get the original id from history + history_id = lib.get_id_from_sibling(node) + if not history_id: + cls.log.error("Could not find ID in history for '%s'", node) + continue + + lib.set_id(node, history_id, overwrite=True) diff --git a/client/ayon_maya/plugins/publish/validate_arnold_scene_source.py b/client/ayon_maya/plugins/publish/validate_arnold_scene_source.py new file mode 100644 index 00000000..43c7b99e --- /dev/null +++ b/client/ayon_maya/plugins/publish/validate_arnold_scene_source.py @@ -0,0 +1,128 @@ +from maya import cmds + +import pyblish.api + +from ayon_core.pipeline.publish import ( + ValidateContentsOrder, PublishValidationError +) +from ayon_maya.api.lib import is_visible + + +class ValidateArnoldSceneSource(pyblish.api.InstancePlugin): + """Validate Arnold Scene Source. + + Ensure no nodes are hidden. + """ + + order = ValidateContentsOrder + hosts = ["maya"] + families = ["ass", "assProxy"] + label = "Validate Arnold Scene Source" + + def process(self, instance): + # Validate against having nodes hidden, which will result in the + # extraction to ignore the node. 
+ nodes = instance.data["members"] + instance.data.get("proxy", []) + nodes = [x for x in nodes if cmds.objectType(x, isAType='dagNode')] + hidden_nodes = [ + x for x in nodes if not is_visible(x, intermediateObject=False) + ] + if hidden_nodes: + raise PublishValidationError( + "Found hidden nodes:\n\n{}\n\nPlease unhide for" + " publishing.".format("\n".join(hidden_nodes)) + ) + + +class ValidateArnoldSceneSourceProxy(pyblish.api.InstancePlugin): + """Validate Arnold Scene Source Proxy. + + When using proxies we need the nodes to share the same names and not be + parent to the world. This ends up needing at least two groups with content + nodes and proxy nodes in another. + """ + + order = ValidateContentsOrder + hosts = ["maya"] + families = ["assProxy"] + label = "Validate Arnold Scene Source Proxy" + + def _get_nodes_by_name(self, nodes): + ungrouped_nodes = [] + nodes_by_name = {} + parents = [] + for node in nodes: + node_split = node.split("|") + if len(node_split) == 2: + ungrouped_nodes.append(node) + + parent = "|".join(node_split[:-1]) + if parent: + parents.append(parent) + + node_name = node.rsplit("|", 1)[-1].rsplit(":", 1)[-1] + nodes_by_name[node_name] = node + + return ungrouped_nodes, nodes_by_name, parents + + def process(self, instance): + # Validate against nodes directly parented to world. + ungrouped_nodes = [] + + nodes, content_nodes_by_name, content_parents = ( + self._get_nodes_by_name(instance.data["members"]) + ) + ungrouped_nodes.extend(nodes) + + nodes, proxy_nodes_by_name, proxy_parents = self._get_nodes_by_name( + instance.data.get("proxy", []) + ) + ungrouped_nodes.extend(nodes) + + if ungrouped_nodes: + raise PublishValidationError( + "Found nodes parented to the world: {}\n" + "All nodes need to be grouped.".format(ungrouped_nodes) + ) + + # Validate for content and proxy nodes amount being the same. + if len(instance.data["members"]) != len(instance.data["proxy"]): + raise PublishValidationError( + "Amount of content nodes ({}) and proxy nodes ({}) needs to " + "be the same.\nContent nodes: {}\nProxy nodes:{}".format( + len(instance.data["members"]), + len(instance.data["proxy"]), + instance.data["members"], + instance.data["proxy"] + ) + ) + + # Validate against content and proxy nodes sharing same parent. + if list(set(content_parents) & set(proxy_parents)): + raise PublishValidationError( + "Content and proxy nodes cannot share the same parent." + ) + + # Validate for content and proxy nodes sharing same names. 
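+        # For example (hypothetical nodes): content "|content_GRP|box1" and
+        # proxy "|proxy_GRP|ns:box1" both reduce to the key "box1" in
+        # _get_nodes_by_name(), so they pair up; any short name present on
+        # only one side is reported below.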
+ sorted_content_names = sorted(content_nodes_by_name.keys()) + sorted_proxy_names = sorted(proxy_nodes_by_name.keys()) + odd_content_names = list( + set(sorted_content_names) - set(sorted_proxy_names) + ) + odd_content_nodes = [ + content_nodes_by_name[x] for x in odd_content_names + ] + odd_proxy_names = list( + set(sorted_proxy_names) - set(sorted_content_names) + ) + odd_proxy_nodes = [ + proxy_nodes_by_name[x] for x in odd_proxy_names + ] + if not sorted_content_names == sorted_proxy_names: + raise PublishValidationError( + "Content and proxy nodes need to share the same names.\n" + "Content nodes not matching: {}\n" + "Proxy nodes not matching: {}".format( + odd_content_nodes, odd_proxy_nodes + ) + ) diff --git a/client/ayon_maya/plugins/publish/validate_arnold_scene_source_cbid.py b/client/ayon_maya/plugins/publish/validate_arnold_scene_source_cbid.py new file mode 100644 index 00000000..546d65e8 --- /dev/null +++ b/client/ayon_maya/plugins/publish/validate_arnold_scene_source_cbid.py @@ -0,0 +1,84 @@ +import pyblish.api +from ayon_maya.api import lib +from ayon_core.pipeline.publish import ( + ValidateContentsOrder, + PublishValidationError, + RepairAction, + OptionalPyblishPluginMixin +) + + +class ValidateArnoldSceneSourceCbid(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): + """Validate Arnold Scene Source Cbid. + + It is required for the proxy and content nodes to share the same cbid. + """ + + order = ValidateContentsOrder + hosts = ["maya"] + families = ["assProxy"] + label = "Validate Arnold Scene Source CBID" + actions = [RepairAction] + optional = False + + @classmethod + def apply_settings(cls, project_settings): + # Disable plug-in if cbId workflow is disabled + if not project_settings["maya"].get("use_cbid_workflow", True): + cls.enabled = False + return + + @staticmethod + def _get_nodes_by_name(nodes): + nodes_by_name = {} + for node in nodes: + node_name = node.rsplit("|", 1)[-1].rsplit(":", 1)[-1] + nodes_by_name[node_name] = node + + return nodes_by_name + + @classmethod + def get_invalid_couples(cls, instance): + nodes_by_name = cls._get_nodes_by_name(instance.data["members"]) + proxy_nodes_by_name = cls._get_nodes_by_name(instance.data["proxy"]) + + invalid_couples = [] + for content_name, content_node in nodes_by_name.items(): + proxy_node = proxy_nodes_by_name.get(content_name, None) + + if not proxy_node: + cls.log.debug( + "Content node '{}' has no matching proxy node.".format( + content_node + ) + ) + continue + + content_id = lib.get_id(content_node) + proxy_id = lib.get_id(proxy_node) + if content_id != proxy_id: + invalid_couples.append((content_node, proxy_node)) + + return invalid_couples + + def process(self, instance): + if not self.is_active(instance.data): + return + # Proxy validation. + if not instance.data["proxy"]: + return + + # Validate for proxy nodes sharing the same cbId as content nodes. 
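+        # For example (hypothetical values): a content node with cbId
+        # "f1:aa11bb22" whose proxy carries cbId "f1:cc33dd44" forms an
+        # invalid couple; repair() overwrites the proxy id with the
+        # content id.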
+ invalid_couples = self.get_invalid_couples(instance) + if invalid_couples: + raise PublishValidationError( + "Found proxy nodes with mismatching cbid:\n{}".format( + invalid_couples + ) + ) + + @classmethod + def repair(cls, instance): + for content_node, proxy_node in cls.get_invalid_couples(instance): + lib.set_id(proxy_node, lib.get_id(content_node), overwrite=True) diff --git a/client/ayon_maya/plugins/publish/validate_ass_relative_paths.py b/client/ayon_maya/plugins/publish/validate_ass_relative_paths.py new file mode 100644 index 00000000..6e65eee5 --- /dev/null +++ b/client/ayon_maya/plugins/publish/validate_ass_relative_paths.py @@ -0,0 +1,138 @@ +import os +import types + +import maya.cmds as cmds +from mtoa.core import createOptions + +import pyblish.api +from ayon_core.pipeline.publish import ( + RepairAction, + ValidateContentsOrder, + PublishValidationError, + OptionalPyblishPluginMixin +) + + +class ValidateAssRelativePaths(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): + """Ensure exporting ass file has set relative texture paths""" + + order = ValidateContentsOrder + hosts = ['maya'] + families = ['ass'] + label = "ASS has relative texture paths" + actions = [RepairAction] + optional = False + + def process(self, instance): + if not self.is_active(instance.data): + return + # we cannot ask this until user open render settings as + # `defaultArnoldRenderOptions` doesn't exist + errors = [] + + try: + absolute_texture = cmds.getAttr( + "defaultArnoldRenderOptions.absolute_texture_paths") + absolute_procedural = cmds.getAttr( + "defaultArnoldRenderOptions.absolute_procedural_paths") + texture_search_path = cmds.getAttr( + "defaultArnoldRenderOptions.tspath" + ) + procedural_search_path = cmds.getAttr( + "defaultArnoldRenderOptions.pspath" + ) + except ValueError: + raise PublishValidationError( + "Default Arnold options has not been created yet." + ) + + scene_dir, scene_basename = os.path.split(cmds.file(q=True, loc=True)) + scene_name, _ = os.path.splitext(scene_basename) + + if self.maya_is_true(absolute_texture): + errors.append("Texture path is set to be absolute") + if self.maya_is_true(absolute_procedural): + errors.append("Procedural path is set to be absolute") + + anatomy = instance.context.data["anatomy"] + + # Use project root variables for multiplatform support, see: + # https://docs.arnoldrenderer.com/display/A5AFMUG/Search+Path + # ':' as path separator is supported by Arnold for all platforms. + keys = anatomy.root_environments().keys() + paths = [] + for k in keys: + paths.append("[{}]".format(k)) + + self.log.debug("discovered roots: {}".format(":".join(paths))) + + if ":".join(paths) not in texture_search_path: + errors.append(( + "Project roots {} are not in texture_search_path: {}" + ).format(paths, texture_search_path)) + + if ":".join(paths) not in procedural_search_path: + errors.append(( + "Project roots {} are not in procedural_search_path: {}" + ).format(paths, procedural_search_path)) + + if errors: + raise PublishValidationError("\n".join(errors)) + + @classmethod + def repair(cls, instance): + createOptions() + + texture_path = cmds.getAttr("defaultArnoldRenderOptions.tspath") + procedural_path = cmds.getAttr("defaultArnoldRenderOptions.pspath") + + # Use project root variables for multiplatform support, see: + # https://docs.arnoldrenderer.com/display/A5AFMUG/Search+Path + # ':' as path separator is supported by Arnold for all platforms. 
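+        # For example (hypothetical key name): a root environment key
+        # "AYON_PROJECT_ROOT_WORK" becomes the token
+        # "[AYON_PROJECT_ROOT_WORK]"; the tokens are joined with ":" and
+        # prepended to the existing search path below, and Arnold expands
+        # them from environment variables per platform.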
+ anatomy = instance.context.data["anatomy"] + keys = anatomy.root_environments().keys() + paths = [] + for k in keys: + paths.append("[{}]".format(k)) + + cmds.setAttr( + "defaultArnoldRenderOptions.tspath", + ":".join([p for p in paths + [texture_path] if p]), + type="string" + ) + cmds.setAttr( + "defaultArnoldRenderOptions.absolute_texture_paths", + False + ) + + cmds.setAttr( + "defaultArnoldRenderOptions.pspath", + ":".join([p for p in paths + [procedural_path] if p]), + type="string" + ) + cmds.setAttr( + "defaultArnoldRenderOptions.absolute_procedural_paths", + False + ) + + @staticmethod + def find_absolute_path(relative_path, all_root_paths): + for root_path in all_root_paths: + possible_path = os.path.join(root_path, relative_path) + if os.path.exists(possible_path): + return possible_path + + def maya_is_true(self, attr_val): + """ + Whether a Maya attr evaluates to True. + When querying an attribute value from an ambiguous object the + Maya API will return a list of values, which need to be properly + handled to evaluate properly. + """ + if isinstance(attr_val, bool): + return attr_val + elif isinstance(attr_val, (list, types.GeneratorType)): + return any(attr_val) + else: + return bool(attr_val) diff --git a/client/ayon_maya/plugins/publish/validate_assembly_name.py b/client/ayon_maya/plugins/publish/validate_assembly_name.py new file mode 100644 index 00000000..c5ac22dd --- /dev/null +++ b/client/ayon_maya/plugins/publish/validate_assembly_name.py @@ -0,0 +1,58 @@ +import pyblish.api +import maya.cmds as cmds +import ayon_maya.api.action +from ayon_core.pipeline.publish import ( + PublishValidationError, + OptionalPyblishPluginMixin +) + + +class ValidateAssemblyName(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): + """ Ensure Assembly name ends with `GRP` + + Check if assembly name ends with `_GRP` string. 
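+
+    For example (hypothetical names): "setdress_GRP" passes, while
+    "setdress_grp" and "setdress" are reported as invalid assemblies.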
+ """ + + label = "Validate Assembly Name" + order = pyblish.api.ValidatorOrder + families = ["assembly"] + actions = [ayon_maya.api.action.SelectInvalidAction] + active = False + optional = True + + @classmethod + def get_invalid(cls, instance): + cls.log.debug("Checking name of {}".format(instance.name)) + + content_instance = instance.data.get("setMembers", None) + if not content_instance: + cls.log.error("Instance has no nodes!") + return True + + # All children will be included in the extracted export so we also + # validate *all* descendents of the set members and we skip any + # intermediate shapes + descendants = cmds.listRelatives(content_instance, + allDescendents=True, + fullPath=True) or [] + descendants = cmds.ls( + descendants, noIntermediate=True, type="transform") + content_instance = list(set(content_instance + descendants)) + assemblies = cmds.ls(content_instance, assemblies=True, long=True) + + invalid = [] + for cr in assemblies: + if not cr.endswith('_GRP'): + cls.log.error("{} doesn't end with _GRP".format(cr)) + invalid.append(cr) + + return invalid + + def process(self, instance): + if not self.is_active(instance.data): + return + invalid = self.get_invalid(instance) + if invalid: + raise PublishValidationError("Found {} invalid named assembly " + "items".format(len(invalid))) diff --git a/client/ayon_maya/plugins/publish/validate_assembly_namespaces.py b/client/ayon_maya/plugins/publish/validate_assembly_namespaces.py new file mode 100644 index 00000000..51b94859 --- /dev/null +++ b/client/ayon_maya/plugins/publish/validate_assembly_namespaces.py @@ -0,0 +1,45 @@ +import pyblish.api +import ayon_maya.api.action +from ayon_core.pipeline.publish import ( + PublishValidationError, + OptionalPyblishPluginMixin +) + +class ValidateAssemblyNamespaces(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): + """Ensure namespaces are not nested + + In the outliner an item in a normal namespace looks as following: + props_desk_01_:modelDefault + + Any namespace which diverts from that is illegal, example of an illegal + namespace: + room_study_01_:props_desk_01_:modelDefault + + """ + + label = "Validate Assembly Namespaces" + order = pyblish.api.ValidatorOrder + families = ["assembly"] + actions = [ayon_maya.api.action.SelectInvalidAction] + optional = False + + def process(self, instance): + if not self.is_active(instance.data): + return + self.log.debug("Checking namespace for %s" % instance.name) + if self.get_invalid(instance): + raise PublishValidationError("Nested namespaces found") + + @classmethod + def get_invalid(cls, instance): + + from maya import cmds + + invalid = [] + for item in cmds.ls(instance): + item_parts = item.split("|", 1)[0].rsplit(":") + if len(item_parts[:-1]) > 1: + invalid.append(item) + + return invalid diff --git a/client/ayon_maya/plugins/publish/validate_assembly_transforms.py b/client/ayon_maya/plugins/publish/validate_assembly_transforms.py new file mode 100644 index 00000000..c5f0b6fd --- /dev/null +++ b/client/ayon_maya/plugins/publish/validate_assembly_transforms.py @@ -0,0 +1,119 @@ +import pyblish.api +from maya import cmds + +import ayon_maya.api.action +from ayon_core.pipeline.publish import ( + PublishValidationError, + RepairAction, + OptionalPyblishPluginMixin +) + + +class ValidateAssemblyModelTransforms(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): + """Verify only root nodes of the loaded asset have transformations. + + Note: This check is temporary and is subject to change. 
+
+    Example outliner:
+    <> means referenced
+    ===================================================================
+
+    setdress_GRP|
+        props_GRP|
+            barrel_01_:modelDefault|       [can have transforms]
+                <> barrel_01_:barrel_GRP   [CAN'T have transforms]
+
+            fence_01_:modelDefault|        [can have transforms]
+                <> fence_01_:fence_GRP     [CAN'T have transforms]
+
+    """
+
+    order = pyblish.api.ValidatorOrder + 0.49
+    label = "Assembly Model Transforms"
+    families = ["assembly"]
+    actions = [ayon_maya.api.action.SelectInvalidAction,
+               RepairAction]
+
+    prompt_message = ("You are about to reset the matrix to the default"
+                      " values. This can alter the look of your scene. "
+                      "Are you sure you want to continue?")
+
+    optional = False
+
+    def process(self, instance):
+        if not self.is_active(instance.data):
+            return
+        invalid = self.get_invalid(instance)
+        if invalid:
+            raise PublishValidationError(
+                ("Found {} invalid transforms of assembly "
+                 "items").format(len(invalid)))
+
+    @classmethod
+    def get_invalid(cls, instance):
+
+        from ayon_maya.api import lib
+
+        # Get all transforms in the loaded containers
+        container_roots = cmds.listRelatives(instance.data["nodesHierarchy"],
+                                             children=True,
+                                             type="transform",
+                                             fullPath=True)
+
+        transforms_in_container = cmds.listRelatives(container_roots,
+                                                     allDescendents=True,
+                                                     type="transform",
+                                                     fullPath=True)
+
+        # Extra check due to the container roots still being passed through
+        transforms_in_container = [i for i in transforms_in_container if i
+                                   not in container_roots]
+
+        # Ensure all are identity matrix
+        invalid = []
+        for transform in transforms_in_container:
+            node_matrix = cmds.xform(transform,
+                                     query=True,
+                                     matrix=True,
+                                     objectSpace=True)
+            if not lib.matrix_equals(node_matrix, lib.DEFAULT_MATRIX):
+                invalid.append(transform)
+
+        return invalid
+
+    @classmethod
+    def repair(cls, instance):
+        """Reset matrix for illegally transformed nodes
+
+        We want to ensure the user knows the reset will alter the look of
+        the current scene because the transformations were done on asset
+        nodes instead of the asset top node.
+
+        Args:
+            instance:
+
+        Returns:
+            None
+
+        """
+
+        from qtpy import QtWidgets
+
+        from ayon_maya.api import lib
+
+        # Ask the user to confirm before resetting any matrices.
+        choice = QtWidgets.QMessageBox.warning(
+            None,
+            "Matrix reset",
+            cls.prompt_message,
+            QtWidgets.QMessageBox.Ok | QtWidgets.QMessageBox.Cancel
+        )
+
+        invalid = cls.get_invalid(instance)
+        if not invalid:
+            cls.log.info("No invalid nodes")
+            return
+
+        # Only apply the reset when the user explicitly confirmed; both
+        # buttons are truthy values, so comparing against Ok is required.
+        if choice == QtWidgets.QMessageBox.Ok:
+            cmds.xform(invalid, matrix=lib.DEFAULT_MATRIX, objectSpace=True)
diff --git a/client/ayon_maya/plugins/publish/validate_attributes.py b/client/ayon_maya/plugins/publish/validate_attributes.py
new file mode 100644
index 00000000..08620e22
--- /dev/null
+++ b/client/ayon_maya/plugins/publish/validate_attributes.py
@@ -0,0 +1,118 @@
+import json
+from collections import defaultdict
+
+import pyblish.api
+from maya import cmds
+
+from ayon_maya.api.lib import set_attribute
+from ayon_core.pipeline.publish import (
+    OptionalPyblishPluginMixin, PublishValidationError, RepairAction,
+    ValidateContentsOrder)
+
+
+class ValidateAttributes(pyblish.api.InstancePlugin,
+                         OptionalPyblishPluginMixin):
+    """Ensure attributes are consistent.
+ + Attributes to validate and their values comes from the + "maya/attributes.json" preset, which needs this structure: + { + "family": { + "node_name.attribute_name": attribute_value + } + } + """ + + order = ValidateContentsOrder + label = "Validate Attributes" + hosts = ["maya"] + actions = [RepairAction] + optional = True + + attributes = "{}" + + def process(self, instance): + if not self.is_active(instance.data): + return + + # Check for preset existence. + if not self.get_attributes_data(): + return + + invalid = self.get_invalid(instance, compute=True) + if invalid: + raise PublishValidationError( + "Found attributes with invalid values: {}".format(invalid) + ) + + @classmethod + def get_attributes_data(cls): + return json.loads(cls.attributes) + + @classmethod + def get_invalid(cls, instance, compute=False): + if compute: + return cls.get_invalid_attributes(instance) + else: + return instance.data.get("invalid_attributes", []) + + @classmethod + def get_invalid_attributes(cls, instance): + invalid_attributes = [] + + attributes_data = cls.get_attributes_data() + # Filter families. + families = [instance.data["productType"]] + families += instance.data.get("families", []) + families = set(families) & set(attributes_data.keys()) + if not families: + return [] + + # Get all attributes to validate. + attributes = defaultdict(dict) + for family in families: + if family not in attributes_data: + # No attributes to validate for family + continue + + for preset_attr, preset_value in attributes_data[family].items(): + node_name, attribute_name = preset_attr.split(".", 1) + attributes[node_name][attribute_name] = preset_value + + if not attributes: + return [] + + # Get invalid attributes. + nodes = cmds.ls(long=True) + for node in nodes: + node_name = node.rsplit("|", 1)[-1].rsplit(":", 1)[-1] + if node_name not in attributes: + continue + + for attr_name, expected in attributes[node_name].items(): + + # Skip if attribute does not exist + if not cmds.attributeQuery(attr_name, node=node, exists=True): + continue + + plug = "{}.{}".format(node, attr_name) + value = cmds.getAttr(plug) + if value != expected: + invalid_attributes.append( + { + "attribute": plug, + "expected": expected, + "current": value + } + ) + + instance.data["invalid_attributes"] = invalid_attributes + return invalid_attributes + + @classmethod + def repair(cls, instance): + invalid = cls.get_invalid(instance) + for data in invalid: + node, attr = data["attribute"].split(".", 1) + value = data["expected"] + set_attribute(node=node, attribute=attr, value=value) diff --git a/client/ayon_maya/plugins/publish/validate_camera_attributes.py b/client/ayon_maya/plugins/publish/validate_camera_attributes.py new file mode 100644 index 00000000..95d1e76b --- /dev/null +++ b/client/ayon_maya/plugins/publish/validate_camera_attributes.py @@ -0,0 +1,76 @@ +import pyblish.api +from maya import cmds + +import ayon_maya.api.action +from ayon_core.pipeline.publish import ( + PublishValidationError, + ValidateContentsOrder, + OptionalPyblishPluginMixin +) + + +class ValidateCameraAttributes(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): + """Validates Camera has no invalid attribute keys or values. + + The Alembic file format does not a specific subset of attributes as such + we validate that no values are set there as the output will not match the + current scene. For example the preScale, film offsets and film roll. 
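+
+    For example (illustrative values): a camera with preScale set to 1.2
+    or horizontalFilmOffset set to 0.1 fails this check, because the
+    Alembic output would silently revert to the defaults listed in
+    DEFAULTS below.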
+ + """ + + order = ValidateContentsOrder + families = ['camera'] + hosts = ['maya'] + label = 'Camera Attributes' + actions = [ayon_maya.api.action.SelectInvalidAction] + optional = True + + DEFAULTS = [ + ("filmFitOffset", 0.0), + ("horizontalFilmOffset", 0.0), + ("verticalFilmOffset", 0.0), + ("preScale", 1.0), + ("filmTranslateH", 0.0), + ("filmTranslateV", 0.0), + ("filmRollValue", 0.0) + ] + + @classmethod + def get_invalid(cls, instance): + + # get cameras + members = instance.data['setMembers'] + shapes = cmds.ls(members, dag=True, shapes=True, long=True) + cameras = cmds.ls(shapes, type='camera', long=True) + + invalid = set() + for cam in cameras: + + for attr, default_value in cls.DEFAULTS: + plug = "{}.{}".format(cam, attr) + value = cmds.getAttr(plug) + + # Check if is default value + if value != default_value: + cls.log.warning("Invalid attribute value: {0} " + "(should be: {1}))".format(plug, + default_value)) + invalid.add(cam) + + if cmds.listConnections(plug, source=True, destination=False): + # TODO: Validate correctly whether value always correct + cls.log.warning("%s has incoming connections, validation " + "is unpredictable." % plug) + + return list(invalid) + + def process(self, instance): + """Process all the nodes in the instance""" + if not self.is_active(instance.data): + return + invalid = self.get_invalid(instance) + + if invalid: + raise PublishValidationError( + "Invalid camera attributes: {}".format(invalid)) diff --git a/client/ayon_maya/plugins/publish/validate_camera_contents.py b/client/ayon_maya/plugins/publish/validate_camera_contents.py new file mode 100644 index 00000000..3ce512ae --- /dev/null +++ b/client/ayon_maya/plugins/publish/validate_camera_contents.py @@ -0,0 +1,83 @@ +import pyblish.api +from maya import cmds + +import ayon_maya.api.action +from ayon_core.pipeline.publish import ( + PublishValidationError, + ValidateContentsOrder, + OptionalPyblishPluginMixin) + + +class ValidateCameraContents(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): + """Validates Camera instance contents. + + A Camera instance may only hold a SINGLE camera's transform, nothing else. + + It may hold a "locator" as shape, but different shapes are down the + hierarchy. + + """ + + order = ValidateContentsOrder + families = ['camera'] + hosts = ['maya'] + label = 'Camera Contents' + actions = [ayon_maya.api.action.SelectInvalidAction] + validate_shapes = True + optional = False + + @classmethod + def get_invalid(cls, instance): + + # get cameras + members = instance.data['setMembers'] + shapes = cmds.ls(members, dag=True, shapes=True, long=True) + + # single camera + invalid = [] + cameras = cmds.ls(shapes, type='camera', long=True) + if len(cameras) != 1: + cls.log.error("Camera instance must have a single camera. 
" + "Found {0}: {1}".format(len(cameras), cameras)) + invalid.extend(cameras) + + # We need to check this edge case because returning an extended + # list when there are no actual cameras results in + # still an empty 'invalid' list + if len(cameras) < 1: + if members: + # If there are members in the instance return all of + # them as 'invalid' so the user can still select invalid + cls.log.error("No cameras found in instance " + "members: {}".format(members)) + return members + + raise PublishValidationError( + "No cameras found in empty instance.") + + if not cls.validate_shapes: + cls.log.debug("Not validating shapes in the camera content" + " because 'validate shapes' is disabled") + return invalid + + # non-camera shapes + valid_shapes = cmds.ls(shapes, type=('camera', 'locator'), long=True) + shapes = set(shapes) - set(valid_shapes) + if shapes: + shapes = list(shapes) + cls.log.error("Camera instance should only contain camera " + "shapes. Found: {0}".format(shapes)) + invalid.extend(shapes) + + invalid = list(set(invalid)) + return invalid + + def process(self, instance): + """Process all the nodes in the instance""" + if not self.is_active(instance.data): + return + invalid = self.get_invalid(instance) + if invalid: + raise PublishValidationError("Invalid camera contents: " + "{0}".format(invalid)) diff --git a/client/ayon_maya/plugins/publish/validate_color_sets.py b/client/ayon_maya/plugins/publish/validate_color_sets.py new file mode 100644 index 00000000..40d72974 --- /dev/null +++ b/client/ayon_maya/plugins/publish/validate_color_sets.py @@ -0,0 +1,63 @@ +from maya import cmds + +import pyblish.api +import ayon_maya.api.action +from ayon_core.pipeline.publish import ( + ValidateMeshOrder, + OptionalPyblishPluginMixin, + PublishValidationError, + RepairAction +) + + +class ValidateColorSets(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): + """Validate all meshes in the instance have unlocked normals + + These can be removed manually through: + Modeling > Mesh Display > Color Sets Editor + + """ + + order = ValidateMeshOrder + hosts = ['maya'] + families = ['model'] + label = 'Mesh ColorSets' + actions = [ + ayon_maya.api.action.SelectInvalidAction, RepairAction + ] + optional = True + + @staticmethod + def has_color_sets(mesh): + """Return whether a mesh node has locked normals""" + return cmds.polyColorSet(mesh, + allColorSets=True, + query=True) + + @classmethod + def get_invalid(cls, instance): + """Return the meshes with ColorSets in instance""" + + meshes = cmds.ls(instance, type='mesh', long=True) + return [mesh for mesh in meshes if cls.has_color_sets(mesh)] + + def process(self, instance): + """Raise invalid when any of the meshes have ColorSets""" + if not self.is_active(instance.data): + return + + invalid = self.get_invalid(instance) + + if invalid: + raise PublishValidationError( + message="Meshes found with Color Sets: {0}".format(invalid) + ) + + @classmethod + def repair(cls, instance): + """Remove all Color Sets on the meshes in this instance.""" + invalid = cls.get_invalid(instance) + for mesh in invalid: + for set in cmds.polyColorSet(mesh, acs=True, q=True): + cmds.polyColorSet(mesh, colorSet=set, delete=True) diff --git a/client/ayon_maya/plugins/publish/validate_current_renderlayer_renderable.py b/client/ayon_maya/plugins/publish/validate_current_renderlayer_renderable.py new file mode 100644 index 00000000..045e2254 --- /dev/null +++ b/client/ayon_maya/plugins/publish/validate_current_renderlayer_renderable.py @@ -0,0 +1,74 @@ +import inspect + 
+import pyblish.api + +from maya import cmds +from ayon_core.pipeline.publish import ( + context_plugin_should_run, + PublishValidationError, + OptionalPyblishPluginMixin +) + + +class ValidateCurrentRenderLayerIsRenderable(pyblish.api.ContextPlugin, + OptionalPyblishPluginMixin): + """Validate if current render layer has a renderable camera. + + There is a bug in Redshift which occurs when the current render layer + at file open has no renderable camera. The error raised is as follows: + + "No renderable cameras found. Aborting render" + + This error is raised even if that render layer will not be rendered. + + """ + + label = "Current Render Layer Has Renderable Camera" + order = pyblish.api.ValidatorOrder + hosts = ["maya"] + families = ["renderlayer"] + optional = False + + def process(self, context): + if not self.is_active(context.data): + return + # Workaround bug pyblish-base#250 + if not context_plugin_should_run(self, context): + return + + # This validator only makes sense when publishing renderlayer instances + # with Redshift. We skip validation if there isn't any. + if not any(self.is_active_redshift_render_instance(instance) + for instance in context): + return + + cameras = cmds.ls(type="camera", long=True) + renderable = any(c for c in cameras if cmds.getAttr(c + ".renderable")) + if not renderable: + layer = cmds.editRenderLayerGlobals(query=True, + currentRenderLayer=True) + raise PublishValidationError( + "Current render layer '{}' has no renderable camera".format( + layer + ), + description=inspect.getdoc(self) + ) + + @staticmethod + def is_active_redshift_render_instance(instance) -> bool: + """Return whether instance is an active renderlayer instance set to + render with Redshift renderer.""" + if not instance.data.get("active", True): + return False + + # Check this before families just because it's a faster check + if not instance.data.get("renderer") == "redshift": + return False + + families = set() + families.add(instance.data.get("family")) + families.update(instance.data.get("families", [])) + if "renderlayer" not in families: + return False + + return True diff --git a/client/ayon_maya/plugins/publish/validate_cycle_error.py b/client/ayon_maya/plugins/publish/validate_cycle_error.py new file mode 100644 index 00000000..ac773a58 --- /dev/null +++ b/client/ayon_maya/plugins/publish/validate_cycle_error.py @@ -0,0 +1,38 @@ +import pyblish.api +from maya import cmds + +import ayon_maya.api.action +from ayon_maya.api.lib import maintained_selection +from ayon_core.pipeline.publish import ( + OptionalPyblishPluginMixin, PublishValidationError, ValidateContentsOrder) + + +class ValidateCycleError(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): + """Validate nodes produce no cycle errors.""" + + order = ValidateContentsOrder + 0.05 + label = "Cycle Errors" + hosts = ["maya"] + families = ["rig"] + actions = [ayon_maya.api.action.SelectInvalidAction] + optional = True + + def process(self, instance): + if not self.is_active(instance.data): + return + + invalid = self.get_invalid(instance) + if invalid: + raise PublishValidationError( + "Nodes produce a cycle error: {}".format(invalid)) + + @classmethod + def get_invalid(cls, instance): + + with maintained_selection(): + cmds.select(instance[:], noExpand=True) + plugs = cmds.cycleCheck(all=False, # check selection only + list=True) + invalid = cmds.ls(plugs, objectsOnly=True, long=True) + return invalid diff --git a/client/ayon_maya/plugins/publish/validate_frame_range.py 
diff --git a/client/ayon_maya/plugins/publish/validate_frame_range.py b/client/ayon_maya/plugins/publish/validate_frame_range.py new file mode 100644 index 00000000..20feee87 --- /dev/null +++ b/client/ayon_maya/plugins/publish/validate_frame_range.py @@ -0,0 +1,206 @@ +import pyblish.api + +from maya import cmds +from ayon_core.pipeline.publish import ( + RepairAction, + ValidateContentsOrder, + PublishValidationError, + OptionalPyblishPluginMixin +) +from ayon_maya.api.lib_rendersetup import ( + get_attr_overrides, + get_attr_in_layer, +) +from maya.app.renderSetup.model.override import AbsOverride + + +class ValidateFrameRange(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): + """Validates the frame ranges. + + This is an optional validator checking if the frame range on the instance + matches the frame range specified for the folder. + + It also validates render frame ranges of render layers. + + The repair action will change everything to match the folder frame range. + + This can be turned off by the artist to allow custom ranges. + """ + + label = "Validate Frame Range" + order = ValidateContentsOrder + families = ["animation", + "pointcache", + "camera", + "proxyAbc", + "renderlayer", + "review", + "yeticache"] + optional = True + actions = [RepairAction] + exclude_product_types = [] + + def process(self, instance): + if not self.is_active(instance.data): + return + + context = instance.context + if instance.data.get("tileRendering"): + self.log.debug( + "Skipping frame range validation because " + "tile rendering is enabled." + ) + return + + frame_start_handle = int(context.data.get("frameStartHandle")) + frame_end_handle = int(context.data.get("frameEndHandle")) + handle_start = int(context.data.get("handleStart")) + handle_end = int(context.data.get("handleEnd")) + frame_start = int(context.data.get("frameStart")) + frame_end = int(context.data.get("frameEnd")) + + inst_start = int(instance.data.get("frameStartHandle")) + inst_end = int(instance.data.get("frameEndHandle")) + inst_frame_start = int(instance.data.get("frameStart")) + inst_frame_end = int(instance.data.get("frameEnd")) + inst_handle_start = int(instance.data.get("handleStart")) + inst_handle_end = int(instance.data.get("handleEnd")) + + # basic sanity checks + assert frame_start_handle <= frame_end_handle, ( + "start frame must not be higher than end frame") + + # compare with data on instance + errors = [] + # QUESTION shouldn't this be just: + # 'if instance.data["productType"] in self.exclude_product_types:' + if [ef for ef in self.exclude_product_types + if instance.data["productType"] in ef]: + return + if (inst_start != frame_start_handle): + errors.append("Instance start frame [ {} ] doesn't " + "match the one set on folder [ {} ]: " + "{}/{}/{}/{} (handle/start/end/handle)".format( + inst_start, + frame_start_handle, + handle_start, frame_start, frame_end, handle_end + )) + + if (inst_end != frame_end_handle): + errors.append("Instance end frame [ {} ] doesn't " + "match the one set on folder [ {} ]: " + "{}/{}/{}/{} (handle/start/end/handle)".format( + inst_end, + frame_end_handle, + handle_start, frame_start, frame_end, handle_end + )) + + checks = { + "frame start": (frame_start, inst_frame_start), + "frame end": (frame_end, inst_frame_end), + "handle start": (handle_start, inst_handle_start), + "handle end": (handle_end, inst_handle_end) + } + for label, values in checks.items(): + if values[0] != values[1]: + errors.append( + "{} on instance ({}) does not match with the folder " + "({}).".format(label.title(), values[1], values[0]) + )
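
The comparisons above lean on a simple convention: the `*Handle` values are the frame range padded outwards by the handles. A worked sketch of the arithmetic with made-up numbers:

    # Hypothetical folder attributes
    frame_start, frame_end = 1001, 1050
    handle_start, handle_end = 10, 10

    # The range the validator compares against includes the handles
    frame_start_handle = frame_start - handle_start  # 991
    frame_end_handle = frame_end + handle_end        # 1060

    assert frame_start_handle <= frame_end_handle

An instance whose `frameStartHandle` is anything other than 991 here would be reported as a mismatch.
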
+ + if errors: + report = "Frame range settings are incorrect.\n\n" + for error in errors: + report += "- {}\n\n".format(error) + + raise PublishValidationError(report, title="Frame Range incorrect") + + @classmethod + def repair(cls, instance): + """ + Repair instance container to match folder data. + """ + + if "renderlayer" in instance.data.get("families"): + # Special behavior for renderlayers + cls.repair_renderlayer(instance) + return + + node = instance.data["name"] + context = instance.context + + frame_start_handle = int(context.data.get("frameStartHandle")) + frame_end_handle = int(context.data.get("frameEndHandle")) + handle_start = int(context.data.get("handleStart")) + handle_end = int(context.data.get("handleEnd")) + frame_start = int(context.data.get("frameStart")) + frame_end = int(context.data.get("frameEnd")) + + # Start + if cmds.attributeQuery("handleStart", node=node, exists=True): + cmds.setAttr("{}.handleStart".format(node), handle_start) + cmds.setAttr("{}.frameStart".format(node), frame_start) + else: + # Include start handle in frame start if no separate handleStart + # attribute exists on the node + cmds.setAttr("{}.frameStart".format(node), frame_start_handle) + + # End + if cmds.attributeQuery("handleEnd", node=node, exists=True): + cmds.setAttr("{}.handleEnd".format(node), handle_end) + cmds.setAttr("{}.frameEnd".format(node), frame_end) + else: + # Include end handle in frame end if no separate handleEnd + # attribute exists on the node + cmds.setAttr("{}.frameEnd".format(node), frame_end_handle) + + @classmethod + def repair_renderlayer(cls, instance): + """Apply frame range in render settings""" + + layer = instance.data["renderlayer"] + context = instance.context + + start_attr = "defaultRenderGlobals.startFrame" + end_attr = "defaultRenderGlobals.endFrame" + + frame_start_handle = int(context.data.get("frameStartHandle")) + frame_end_handle = int(context.data.get("frameEndHandle")) + + cls._set_attr_in_layer(start_attr, layer, frame_start_handle) + cls._set_attr_in_layer(end_attr, layer, frame_end_handle) + + @classmethod + def _set_attr_in_layer(cls, node_attr, layer, value): + + if get_attr_in_layer(node_attr, layer=layer) == value: + # Already ok. This can happen if you have multiple renderlayers + # validated and there are no frame range overrides. The first + # layer's repair would have fixed the global value already + return
+ + overrides = list(get_attr_overrides(node_attr, layer=layer)) + if overrides: + # We set the last absolute override if it is an absolute override + # otherwise we'll add an Absolute override + last_override = overrides[-1][1] + if not isinstance(last_override, AbsOverride): + collection = last_override.parent() + node, attr = node_attr.split(".", 1) + last_override = collection.createAbsoluteOverride(node, attr) + + cls.log.debug("Setting {attr} absolute override in " + "layer '{layer}': {value}".format(layer=layer, + attr=node_attr, + value=value)) + cmds.setAttr(last_override.name() + ".attrValue", value) + + else: + # Set the attribute directly + # (Note that this will set the global attribute) + cls.log.debug("Setting global {attr}: {value}".format( + attr=node_attr, + value=value + )) + cmds.setAttr(node_attr, value)
diff --git a/client/ayon_maya/plugins/publish/validate_glsl_material.py b/client/ayon_maya/plugins/publish/validate_glsl_material.py new file mode 100644 index 00000000..3735dbb7 --- /dev/null +++ b/client/ayon_maya/plugins/publish/validate_glsl_material.py @@ -0,0 +1,210 @@ +import os +from maya import cmds + +import pyblish.api +from ayon_core.pipeline.publish import ( + RepairAction, + ValidateContentsOrder +) +from ayon_core.pipeline import PublishValidationError, OptionalPyblishPluginMixin + + +class ValidateGLSLMaterial(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): + """ + Validate that the asset uses a GLSL shader + """ + + order = ValidateContentsOrder + 0.1 + families = ['gltf'] + hosts = ['maya'] + label = 'GLSL Shader for GLTF' + actions = [RepairAction] + optional = True + active = True + + def process(self, instance): + if not self.is_active(instance.data): + return + shading_grp = self.get_material_from_shapes(instance) + if not shading_grp: + raise PublishValidationError("No shading group found") + invalid = self.get_texture_shader_invalid(instance) + if invalid: + raise PublishValidationError("Non GLSL Shader found: " + "{0}".format(invalid)) + + def get_material_from_shapes(self, instance): + """Return the shading engines connected to all instance meshes.""" + shapes = cmds.ls(instance, type="mesh", long=True) + shading_grps = [] + for shape in shapes: + shading_grp = cmds.listConnections(shape, + destination=True, + type="shadingEngine") + if shading_grp: + shading_grps.extend(shading_grp) + + return shading_grps + + def get_texture_shader_invalid(self, instance): + + invalid = set() + shading_grp = self.get_material_from_shapes(instance) + for shading_group in shading_grp: + material_name = "{}.surfaceShader".format(shading_group) + material = cmds.listConnections(material_name, + source=True, + destination=False, + type="GLSLShader") + + if not material: + # add material name + material = cmds.listConnections(material_name)[0] + invalid.add(material) + + return list(invalid) + + @classmethod + def repair(cls, instance): + """ + Repair instance by assigning a GLSL shader + to the material + """ + cls.assign_glsl_shader(instance) + return + + @classmethod + def assign_glsl_shader(cls, instance): + """ + Convert StingrayPBS materials to GLSL shaders + for the glb export through the Maya2GLTF plugin + """ + + meshes = cmds.ls(instance, type="mesh", long=True) + cls.log.debug("meshes: {}".format(meshes)) + # load the glsl shader plugin + cmds.loadPlugin("glslShader", quiet=True) + + for mesh in meshes: + # create glsl shader + glsl = cmds.createNode('GLSLShader') + glsl_shading_grp = cmds.sets(name=glsl + "SG", empty=True, + renderable=True, noSurfaceShader=True) + cmds.connectAttr(glsl + ".outColor", + glsl_shading_grp + ".surfaceShader")
glsl_shading_grp + ".surfaceShader") + + # load the maya2gltf shader + ogsfx_path = instance.context.data["project_settings"]["maya"]["publish"]["ExtractGLB"]["ogsfx_path"] # noqa + if not os.path.exists(ogsfx_path): + if ogsfx_path: + # if custom ogsfx path is not specified + # the log below is the warning for the user + cls.log.warning("ogsfx shader file " + "not found in {}".format(ogsfx_path)) + + cls.log.debug("Searching the ogsfx shader file in " + "default maya directory...") + # re-direct to search the ogsfx path in maya_dir + ogsfx_path = os.getenv("MAYA_APP_DIR") + ogsfx_path + if not os.path.exists(ogsfx_path): + raise PublishValidationError("The ogsfx shader file does not " # noqa + "exist: {}".format(ogsfx_path)) # noqa + + cmds.setAttr(glsl + ".shader", ogsfx_path, typ="string") + # list the materials used for the assets + shading_grp = cmds.listConnections(mesh, + destination=True, + type="shadingEngine") + + # get the materials related to the selected assets + for material in shading_grp: + pbs_shader = cmds.listConnections(material, + destination=True, + type="StingrayPBS") + if pbs_shader: + cls.pbs_shader_conversion(pbs_shader, glsl) + # setting up to relink the texture if + # the mesh is with aiStandardSurface + arnold_shader = cmds.listConnections(material, + destination=True, + type="aiStandardSurface") + if arnold_shader: + cls.arnold_shader_conversion(arnold_shader, glsl) + + cmds.sets(mesh, forceElement=str(glsl_shading_grp)) + + @classmethod + def pbs_shader_conversion(cls, main_shader, glsl): + + cls.log.debug("StringrayPBS detected " + "-> Can do texture conversion") + + for shader in main_shader: + # get the file textures related to the PBS Shader + albedo = cmds.listConnections(shader + + ".TEX_color_map") + if albedo: + dif_output = albedo[0] + ".outColor" + # get the glsl_shader input + # reconnect the file nodes to maya2gltf shader + glsl_dif = glsl + ".u_BaseColorTexture" + cmds.connectAttr(dif_output, glsl_dif) + + # connect orm map if there is one + orm_packed = cmds.listConnections(shader + + ".TEX_ao_map") + if orm_packed: + orm_output = orm_packed[0] + ".outColor" + + mtl = glsl + ".u_MetallicTexture" + ao = glsl + ".u_OcclusionTexture" + rough = glsl + ".u_RoughnessTexture" + + cmds.connectAttr(orm_output, mtl) + cmds.connectAttr(orm_output, ao) + cmds.connectAttr(orm_output, rough) + + # connect nrm map if there is one + nrm = cmds.listConnections(shader + + ".TEX_normal_map") + if nrm: + nrm_output = nrm[0] + ".outColor" + glsl_nrm = glsl + ".u_NormalTexture" + cmds.connectAttr(nrm_output, glsl_nrm) + + @classmethod + def arnold_shader_conversion(cls, main_shader, glsl): + cls.log.debug("aiStandardSurface detected " + "-> Can do texture conversion") + + for shader in main_shader: + # get the file textures related to the PBS Shader + albedo = cmds.listConnections(shader + ".baseColor") + if albedo: + dif_output = albedo[0] + ".outColor" + # get the glsl_shader input + # reconnect the file nodes to maya2gltf shader + glsl_dif = glsl + ".u_BaseColorTexture" + cmds.connectAttr(dif_output, glsl_dif) + + orm_packed = cmds.listConnections(shader + + ".specularRoughness") + if orm_packed: + orm_output = orm_packed[0] + ".outColor" + + mtl = glsl + ".u_MetallicTexture" + ao = glsl + ".u_OcclusionTexture" + rough = glsl + ".u_RoughnessTexture" + + cmds.connectAttr(orm_output, mtl) + cmds.connectAttr(orm_output, ao) + cmds.connectAttr(orm_output, rough) + + # connect nrm map if there is one + bump_node = cmds.listConnections(shader + + ".normalCamera") + if 
+ + @classmethod + def arnold_shader_conversion(cls, main_shader, glsl): + cls.log.debug("aiStandardSurface detected " + "-> Can do texture conversion") + + for shader in main_shader: + # get the file textures related to the aiStandardSurface shader + albedo = cmds.listConnections(shader + ".baseColor") + if albedo: + dif_output = albedo[0] + ".outColor" + # get the glsl_shader input + # reconnect the file nodes to maya2gltf shader + glsl_dif = glsl + ".u_BaseColorTexture" + cmds.connectAttr(dif_output, glsl_dif) + + orm_packed = cmds.listConnections(shader + + ".specularRoughness") + if orm_packed: + orm_output = orm_packed[0] + ".outColor" + + mtl = glsl + ".u_MetallicTexture" + ao = glsl + ".u_OcclusionTexture" + rough = glsl + ".u_RoughnessTexture" + + cmds.connectAttr(orm_output, mtl) + cmds.connectAttr(orm_output, ao) + cmds.connectAttr(orm_output, rough) + + # connect nrm map if there is one + bump_node = cmds.listConnections(shader + + ".normalCamera") + if bump_node: + for bump in bump_node: + nrm = cmds.listConnections(bump + + ".bumpValue") + if nrm: + nrm_output = nrm[0] + ".outColor" + glsl_nrm = glsl + ".u_NormalTexture" + cmds.connectAttr(nrm_output, glsl_nrm)
diff --git a/client/ayon_maya/plugins/publish/validate_glsl_plugin.py b/client/ayon_maya/plugins/publish/validate_glsl_plugin.py new file mode 100644 index 00000000..d783da8b --- /dev/null +++ b/client/ayon_maya/plugins/publish/validate_glsl_plugin.py @@ -0,0 +1,37 @@ + +from maya import cmds + +import pyblish.api +from ayon_core.pipeline.publish import ( + RepairAction, + ValidateContentsOrder, + PublishValidationError, + OptionalPyblishPluginMixin +) + + +class ValidateGLSLPlugin(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): + """ + Validate that the maya2glTF plugin is loaded + """ + + order = ValidateContentsOrder + 0.15 + families = ['gltf'] + hosts = ['maya'] + label = 'maya2glTF plugin' + actions = [RepairAction] + optional = False + + def process(self, instance): + if not self.is_active(instance.data): + return + if not cmds.pluginInfo("maya2glTF", query=True, loaded=True): + raise PublishValidationError("maya2glTF is not loaded") + + @classmethod + def repair(cls, instance): + """ + Repair instance by loading the plugin + """ + return cmds.loadPlugin("maya2glTF", quiet=True)
diff --git a/client/ayon_maya/plugins/publish/validate_instance_has_members.py b/client/ayon_maya/plugins/publish/validate_instance_has_members.py new file mode 100644 index 00000000..de20a7c1 --- /dev/null +++ b/client/ayon_maya/plugins/publish/validate_instance_has_members.py @@ -0,0 +1,39 @@ +import pyblish.api +import ayon_maya.api.action +from ayon_core.pipeline.publish import ( + ValidateContentsOrder, + PublishValidationError +) + + +class ValidateInstanceHasMembers(pyblish.api.InstancePlugin): + """Validates instance objectSet has *any* members.""" + + order = ValidateContentsOrder + hosts = ["maya"] + label = 'Instance has members' + actions = [ayon_maya.api.action.SelectInvalidAction] + + @classmethod + def get_invalid(cls, instance): + invalid = list() + if not instance.data.get("setMembers"): + objectset_name = instance.data['name'] + invalid.append(objectset_name) + + return invalid + + def process(self, instance): + # Allow renderlayer, rendersetup and workfile to be empty + skip_families = {"workfile", "renderlayer", "rendersetup"} + if instance.data.get("productType") in skip_families: + return + + invalid = self.get_invalid(instance) + if invalid: + # Invalid will always be a single entry, we log the single name + name = invalid[0] + raise PublishValidationError( + title="Empty instance", + message="Instance '{0}' is empty".format(name) + )
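
ValidateInstanceHasMembers keys off the collected `setMembers` data; the equivalent direct scene query is a one-liner. A sketch, assuming a publish objectSet named `modelMain` exists in the scene (the set name is hypothetical):

    from maya import cmds

    # cmds.sets in query mode returns the objectSet members, or None
    members = cmds.sets("modelMain", query=True) or []
    if not members:
        print("Instance objectSet 'modelMain' is empty")
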
diff --git a/client/ayon_maya/plugins/publish/validate_instance_in_context.py b/client/ayon_maya/plugins/publish/validate_instance_in_context.py new file mode 100644 index 00000000..f67845bc --- /dev/null +++ b/client/ayon_maya/plugins/publish/validate_instance_in_context.py @@ -0,0 +1,84 @@ +# -*- coding: utf-8 -*- +"""Validate that the instance context matches the publish context.""" +from __future__ import absolute_import + +import pyblish.api +import ayon_maya.api.action +from ayon_core.pipeline.publish import ( + RepairAction, + ValidateContentsOrder, + PublishValidationError, + OptionalPyblishPluginMixin +) + + +class ValidateInstanceInContext(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): + """Validator to check that the instance context matches the current context. + + When working in per-shot style you always publish data in the context of + the current folder (shot). This validator checks that this is the case. + It is optional so it can be disabled when needed. + + Action on this validator will select the invalid instances in Outliner. + """ + + order = ValidateContentsOrder + label = "Instance in same Context" + optional = True + hosts = ["maya"] + actions = [ + ayon_maya.api.action.SelectInvalidAction, RepairAction + ] + + def process(self, instance): + if not self.is_active(instance.data): + return + + folder_path = instance.data.get("folderPath") + task = instance.data.get("task") + context = self.get_context(instance) + if (folder_path, task) != context: + context_label = "{} > {}".format(*context) + instance_label = "{} > {}".format(folder_path, task) + raise PublishValidationError( + message=( + "Instance '{}' publishes to different context than current" + " context: {}. Current context: {}".format( + instance.name, instance_label, context_label + ) + ), + description=( + "## Publishing to a different context data\n" + "There are publish instances present which are publishing " + "into a different folder than your current context.\n\n" + "Usually this is not what you want but there can be cases " + "where you might want to publish into another folder or " + "shot. If that's the case you can disable the validation " + "on the instance to ignore it." + ) + ) + + @classmethod + def get_invalid(cls, instance): + return [instance.data["instance_node"]] + + @classmethod + def repair(cls, instance): + context_folder_path, context_task = cls.get_context( + instance) + + create_context = instance.context.data["create_context"] + instance_id = instance.data["instance_id"] + created_instance = create_context.get_instance_by_id( + instance_id + ) + created_instance["folderPath"] = context_folder_path + created_instance["task"] = context_task + create_context.save_changes() + + @staticmethod + def get_context(instance): + """Return folder path and task from publishing context data""" + context = instance.context + return context.data["folderPath"], context.data["task"]
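
Conceptually the validator above reduces to a tuple comparison between the instance's target and the current scene context, and the repair simply writes the current context back onto the created instance. A sketch with hypothetical values:

    instance_context = ("/shots/sq01/sh010", "animation")  # from instance
    current_context = ("/shots/sq01/sh020", "animation")   # from scene

    if instance_context != current_context:
        print("Instance publishes to a different context: "
              "{} vs {}".format(instance_context, current_context))
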
diff --git a/client/ayon_maya/plugins/publish/validate_instance_subset.py b/client/ayon_maya/plugins/publish/validate_instance_subset.py new file mode 100644 index 00000000..df9ca0bf --- /dev/null +++ b/client/ayon_maya/plugins/publish/validate_instance_subset.py @@ -0,0 +1,53 @@ +import pyblish.api +import string + +import six +from ayon_core.pipeline.publish import ( + ValidateContentsOrder, + PublishValidationError +) + +# Allow only letters, numbers and underscore +allowed = set(string.ascii_lowercase + + string.ascii_uppercase + + string.digits + + '_') + + +def validate_name(product_name): + return all(x in allowed for x in product_name) + + +class ValidateSubsetName(pyblish.api.InstancePlugin): + """Validates product name has only valid characters""" + + order = ValidateContentsOrder + families = ["*"] + label = "Product Name" + + def process(self, instance): + + product_name = instance.data.get("productName", None) + + # Ensure product data + if product_name is None: + raise PublishValidationError( + "Instance is missing product name: {0}".format(product_name) + ) + + if not isinstance(product_name, six.string_types): + raise PublishValidationError(( + "Instance product name must be string, got: {0} ({1})" + ).format(product_name, type(product_name))) + + # Ensure the product name is not empty + if not product_name: + raise PublishValidationError( + "Instance product name is empty: {0}".format(product_name) + ) + + # Validate product characters + if not validate_name(product_name): + raise PublishValidationError(( + "Instance product name contains invalid characters: {0}" + ).format(product_name))
diff --git a/client/ayon_maya/plugins/publish/validate_loaded_plugin.py b/client/ayon_maya/plugins/publish/validate_loaded_plugin.py new file mode 100644 index 00000000..a05920a2 --- /dev/null +++ b/client/ayon_maya/plugins/publish/validate_loaded_plugin.py @@ -0,0 +1,55 @@ +import os +import pyblish.api +import maya.cmds as cmds + +from ayon_core.pipeline.publish import ( + RepairContextAction, + PublishValidationError, + OptionalPyblishPluginMixin +) + + +class ValidateLoadedPlugin(pyblish.api.ContextPlugin, + OptionalPyblishPluginMixin): + """Ensure there are no unauthorized loaded plugins""" + + label = "Loaded Plugin" + order = pyblish.api.ValidatorOrder + hosts = ["maya"] + actions = [RepairContextAction] + optional = True + + @classmethod + def get_invalid(cls, context): + + invalid = [] + loaded_plugins = cmds.pluginInfo(query=True, listPlugins=True) + # get variable from AYON settings + whitelist_native_plugins = cls.whitelist_native_plugins + authorized_plugins = cls.authorized_plugins or [] + + for plugin in loaded_plugins: + if not whitelist_native_plugins and os.getenv('MAYA_LOCATION') \ + in cmds.pluginInfo(plugin, query=True, path=True): + continue + if plugin not in authorized_plugins: + invalid.append(plugin) + + return invalid + + def process(self, context): + if not self.is_active(context.data): + return + invalid = self.get_invalid(context) + if invalid: + raise PublishValidationError( + "Found forbidden plugin name: {}".format(", ".join(invalid)) + ) + + @classmethod + def repair(cls, context): + """Unload forbidden plugins""" + + for plugin in cls.get_invalid(context): + cmds.pluginInfo(plugin, edit=True, autoload=False) + cmds.unloadPlugin(plugin, force=True)
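
A quick usage check of the character whitelist defined in validate_instance_subset.py above (a sketch; the plug-in itself raises PublishValidationError rather than asserting):

    import string

    allowed = set(string.ascii_letters + string.digits + "_")

    def validate_name(product_name):
        return all(char in allowed for char in product_name)

    assert validate_name("modelMain_v2")
    assert not validate_name("model main")  # space is not allowed
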
diff --git a/client/ayon_maya/plugins/publish/validate_look_contents.py b/client/ayon_maya/plugins/publish/validate_look_contents.py new file mode 100644 index 00000000..4709a7bb --- /dev/null +++ b/client/ayon_maya/plugins/publish/validate_look_contents.py @@ -0,0 +1,138 @@ +import pyblish.api +import ayon_maya.api.action +from ayon_core.pipeline.publish import ( + PublishValidationError, + ValidateContentsOrder +) + + +from maya import cmds # noqa + + +class ValidateLookContents(pyblish.api.InstancePlugin): + """Validate look instance contents + + Rules: + * Look data must have `relationships` and `attributes` keys. + * At least one relationship must be collected. + * All relationship object sets must at least have an ID value. + + Tip: + * When no node IDs are found on shadingEngines please save your scene + and try again. + + """ + + order = ValidateContentsOrder + families = ['look'] + hosts = ['maya'] + label = 'Look Data Contents' + actions = [ayon_maya.api.action.SelectInvalidAction] + + def process(self, instance): + """Process all the nodes in the instance""" + + if not instance[:]: + raise PublishValidationError("Instance is empty") + invalid = self.get_invalid(instance) + if invalid: + raise PublishValidationError("'{}' has invalid look " + "content".format(instance.name)) + + @classmethod + def get_invalid(cls, instance): + """Get all invalid nodes""" + + # check if data has the right attributes and content + attributes = cls.validate_lookdata_attributes(instance) + # check the looks for ID + looks = cls.validate_looks(instance) + # check if file nodes have valid files + files = cls.validate_files(instance) + + invalid = looks + attributes + files + + return invalid + + @classmethod + def validate_lookdata_attributes(cls, instance): + """Check if the lookData has the required attributes + + Args: + instance: The instance to validate. + + """ + + invalid = set() + + keys = ["relationships", "attributes"] + lookdata = instance.data["lookData"] + for key in keys: + if key not in lookdata: + cls.log.error("Look Data has no key " + "'{}'".format(key)) + invalid.add(instance.name) + + # Validate at least one single relationship is collected + if not lookdata["relationships"]: + cls.log.error("Look '%s' has no " + "`relationships`" % instance.name) + invalid.add(instance.name) + + # Check if attributes are on a node with an ID, crucial for rebuild! + for attr_changes in lookdata["attributes"]: + if not attr_changes["uuid"] and not attr_changes["attributes"]: + cls.log.error("Node '%s' has no cbId, please set the " + "attributes on its children if it has any" + % attr_changes["name"]) + invalid.add(instance.name) + + return list(invalid) + + @classmethod + def validate_looks(cls, instance): + + looks = instance.data["lookData"]["relationships"] + invalid = [] + for name, data in looks.items(): + if not data["uuid"]: + cls.log.error("Look '{}' has no UUID".format(name)) + invalid.append(name) + + return invalid + + @classmethod + def validate_files(cls, instance): + + invalid = [] + + resources = instance.data.get("resources", []) + for resource in resources: + files = resource["files"] + if len(files) == 0: + node = resource["node"] + cls.log.error("File node '%s' uses no or non-existing " + "files" % node) + invalid.append(node) + + return invalid + + @classmethod + def validate_renderer(cls, instance): + # TODO: Rewrite this to be more specific and configurable + renderer = cmds.getAttr( + 'defaultRenderGlobals.currentRenderer').lower() + do_maketx = instance.data.get("maketx", False) + do_rstex = instance.data.get("rstex", False) + processors = [] + + if do_maketx: + processors.append('arnold') + if do_rstex: + processors.append('redshift') + + for processor in processors: + if processor != renderer: + cls.log.error("Converted texture does not match current renderer.") # noqa
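
The validator below compares what actually feeds each default shading group slot against a table of accepted inputs. The core query is runnable on its own:

    from maya import cmds

    plug = "initialShadingGroup.surfaceShader"
    inputs = cmds.listConnections(plug, source=True, destination=False,
                                  plugs=True) or []
    print(plug, "<-", inputs)  # e.g. ['standardSurface1.outColor']
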
diff --git a/client/ayon_maya/plugins/publish/validate_look_default_shaders_connections.py b/client/ayon_maya/plugins/publish/validate_look_default_shaders_connections.py new file mode 100644 index 00000000..cfd41561 --- /dev/null +++ b/client/ayon_maya/plugins/publish/validate_look_default_shaders_connections.py @@ -0,0 +1,77 @@ +from maya import cmds + +import pyblish.api +from ayon_core.pipeline.publish import ( + RepairContextAction, + PublishValidationError +) + + +class ValidateLookDefaultShadersConnections(pyblish.api.ContextPlugin): + """Validate default shaders in the scene have their default connections. + + For example, standardSurface1 (or lambert1 in Maya 2023 and earlier) + could potentially be disconnected from the initialShadingGroup. Maya + would then identify another shader as the default shader, which can + have unpredictable results. + + To fix this, the default connections need to be made again. See the logs + for more details on which connections are missing. + + """ + + order = pyblish.api.ValidatorOrder - 0.4999 + families = ['look'] + hosts = ['maya'] + label = 'Look Default Shader Connections' + actions = [RepairContextAction] + + # The default connections to check + DEFAULTS = { + "initialShadingGroup.surfaceShader": ["standardSurface1.outColor", + "lambert1.outColor"], + "initialParticleSE.surfaceShader": ["standardSurface1.outColor", + "lambert1.outColor"], + "initialParticleSE.volumeShader": ["particleCloud1.outColor"] + } + + def process(self, context): + + if self.get_invalid(): + raise PublishValidationError( + "Default shaders in your scene do not have their " + "default shader connections. Please repair them to continue." + ) + + @classmethod + def get_invalid(cls): + + # Process as usual + invalid = list() + for plug, valid_inputs in cls.DEFAULTS.items(): + inputs = cmds.listConnections(plug, + source=True, + destination=False, + plugs=True) or None + if not inputs or inputs[0] not in valid_inputs: + cls.log.error( + "{0} is not connected to {1}. This can result in " + "unexpected behavior. Please reconnect to continue." + "".format(plug, " or ".join(valid_inputs)) + ) + invalid.append(plug) + + return invalid + + @classmethod + def repair(cls, context): + invalid = cls.get_invalid() + for plug in invalid: + valid_inputs = cls.DEFAULTS[plug] + for valid_input in valid_inputs: + if cmds.objExists(valid_input): + cls.log.info( + "Connecting {} -> {}".format(valid_input, plug) + ) + cmds.connectAttr(valid_input, plug, force=True) + break
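
The next validator filters a reference node's stored edits down to the `cbId` attribute. The underlying query looks roughly like this; a sketch where the reference node name is hypothetical:

    from maya import cmds

    ref = "assetRN"  # hypothetical reference node
    edits = cmds.referenceQuery(editAttrs=True,
                                editNodes=True,
                                showDagPath=True,
                                showNamespace=True,
                                onReferenceNode=ref) or []
    cb_id_edits = [edit for edit in edits if edit.endswith(".cbId")]
    print(cb_id_edits)
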
diff --git a/client/ayon_maya/plugins/publish/validate_look_id_reference_edits.py b/client/ayon_maya/plugins/publish/validate_look_id_reference_edits.py new file mode 100644 index 00000000..5a4ccc16 --- /dev/null +++ b/client/ayon_maya/plugins/publish/validate_look_id_reference_edits.py @@ -0,0 +1,109 @@ +from collections import defaultdict +from maya import cmds + +import pyblish.api +import ayon_maya.api.action +from ayon_core.pipeline.publish import ( + RepairAction, + ValidateContentsOrder, + PublishValidationError +) + + +class ValidateLookIdReferenceEdits(pyblish.api.InstancePlugin): + """Validate nodes in look have no reference edits to cbId. + + Note: + This only validates the cbId edits on the referenced nodes that are + used in the look. For example, a transform can have its cbId changed + without being invalidated when it is not used in the look's assignment. + + """ + + order = ValidateContentsOrder + families = ['look'] + hosts = ['maya'] + label = 'Look Id Reference Edits' + actions = [ayon_maya.api.action.SelectInvalidAction, + RepairAction] + + @classmethod + def apply_settings(cls, project_settings): + # Disable plug-in if cbId workflow is disabled + if not project_settings["maya"].get("use_cbid_workflow", True): + cls.enabled = False + return + + def process(self, instance): + invalid = self.get_invalid(instance) + + if invalid: + raise PublishValidationError("Invalid nodes %s" % (invalid,)) + + @staticmethod + def get_invalid(instance): + + # Collect all referenced members + references = defaultdict(set) + relationships = instance.data["lookData"]["relationships"] + for relationship in relationships.values(): + for member in relationship['members']: + node = member["name"] + + if cmds.referenceQuery(node, isNodeReferenced=True): + ref = cmds.referenceQuery(node, referenceNode=True) + references[ref].add(node) + + # Validate whether any has changes to 'cbId' attribute + invalid = list() + for ref, nodes in references.items(): + edits = cmds.referenceQuery(editAttrs=True, + editNodes=True, + showDagPath=True, + showNamespace=True, + onReferenceNode=ref) + for edit in edits: + + # Ensure it is an attribute ending with .cbId + # thus also ignore just node edits (like parenting) + if not edit.endswith(".cbId"): + continue + + # Ensure the attribute is 'cbId' (and not a nested attribute) + node, attr = edit.split(".", 1) + if attr != "cbId": + continue + + if node in nodes: + invalid.append(node) + + return invalid + + @classmethod + def repair(cls, instance): + + invalid = cls.get_invalid(instance) + + # Group invalid nodes by reference node + references = defaultdict(set) + for node in invalid: + ref = cmds.referenceQuery(node, referenceNode=True) + references[ref].add(node) + + # Remove the reference edits on the nodes per reference node + for ref, nodes in references.items(): + for node in nodes: + + # Somehow this only works if you run the removal + # per edit command. + for command in ["addAttr", + "connectAttr", + "deleteAttr", + "disconnectAttr", + "setAttr"]: + cmds.referenceEdit("{}.cbId".format(node), + removeEdits=True, + successfulEdits=True, + failedEdits=True, + editCommand=command, + onReferenceNode=ref)
diff --git a/client/ayon_maya/plugins/publish/validate_look_no_default_shaders.py b/client/ayon_maya/plugins/publish/validate_look_no_default_shaders.py new file mode 100644 index 00000000..21a056c8 --- /dev/null +++ b/client/ayon_maya/plugins/publish/validate_look_no_default_shaders.py @@ -0,0 +1,65 @@ +from maya import cmds + +import pyblish.api +import ayon_maya.api.action +from ayon_core.pipeline.publish import ( + ValidateContentsOrder, + PublishValidationError +) + + +class ValidateLookNoDefaultShaders(pyblish.api.InstancePlugin): + """Validate that no node has a connection to a default shader. + + This checks whether the look has any members of: + - lambert1 + - initialShadingGroup + - initialParticleSE + - particleCloud1 + + If any of those is present it will raise an error. A look is not allowed + to have any of the "default" shaders present in a scene as they can + introduce problems when referenced (overriding local scene shaders). + + To fix this, make sure no shape nodes in the look have any of the + default shaders applied. + + """ + + order = ValidateContentsOrder + 0.01 + families = ['look'] + hosts = ['maya'] + label = 'Look No Default Shaders' + actions = [ayon_maya.api.action.SelectInvalidAction] + + DEFAULT_SHADERS = {"lambert1", "initialShadingGroup", + "initialParticleSE", "particleCloud1"} + + def process(self, instance): + """Process all the nodes in the instance""" + + invalid = self.get_invalid(instance) + if invalid: + raise PublishValidationError("Invalid node relationships found: " + "{0}".format(invalid)) + + @classmethod + def get_invalid(cls, instance): + + invalid = set() + for node in instance: + # Get shading engine connections + shaders = cmds.listConnections(node, type="shadingEngine") or [] + + # Check for any disallowed connections on *all* nodes + if any(s in cls.DEFAULT_SHADERS for s in shaders): + + # Explicitly log each individual "wrong" connection. + for s in shaders: + if s in cls.DEFAULT_SHADERS: + cls.log.error("Node has disallowed connection to " + "'{}': {}".format(s, node)) + + invalid.add(node) + + return list(invalid)
diff --git a/client/ayon_maya/plugins/publish/validate_look_sets.py b/client/ayon_maya/plugins/publish/validate_look_sets.py new file mode 100644 index 00000000..97c790c8 --- /dev/null +++ b/client/ayon_maya/plugins/publish/validate_look_sets.py @@ -0,0 +1,103 @@ +import pyblish.api +import ayon_maya.api.action +from ayon_maya.api import lib +from ayon_core.pipeline.publish import ( + ValidateContentsOrder, + PublishValidationError +) + + +class ValidateLookSets(pyblish.api.InstancePlugin): + """Validate if any sets relationships are not being collected. + + A shader can be assigned to a node that is missing a Colorbleed ID. + Because it is missing the ID, it has not been collected in the instance. + This validator checks those relationships and considers the instance + invalid if a relationship was not collected. + + When the relationship needs to be maintained the artist might need to + create a different* relationship or ensure the node has the Colorbleed ID. + + *The relationship might be too broad (assigned to top node of hierarchy). + This can be countered by creating the relationship on the shape or its + transform. In essence, ensure the item the shader is assigned to has the + Colorbleed ID! + + Examples: + + - Displacement objectSets (like V-Ray): + + It is best practice to add the transform of the shape to the + displacement objectSet. Any parent groups will not work as groups + do not receive a Colorbleed Id. As such the assignments need to be + made to the shapes and their transform.
+ + Example content: + [asset_GRP|geometry_GRP|body_GES, + asset_GRP|geometry_GRP|L_eye_GES, + asset_GRP|geometry_GRP|R_eye_GES, + asset_GRP|geometry_GRP|wings_GEO] + + """ + + order = ValidateContentsOrder + families = ['look'] + hosts = ['maya'] + label = 'Look Sets' + actions = [ayon_maya.api.action.SelectInvalidAction] + + def process(self, instance): + """Process all the nodes in the instance""" + + invalid = self.get_invalid(instance) + if invalid: + raise PublishValidationError("'{}' has invalid look " + "content".format(instance.name)) + + @classmethod + def get_invalid(cls, instance): + """Get all invalid nodes""" + + relationships = instance.data["lookData"]["relationships"] + invalid = [] + + renderlayer = instance.data.get("renderlayer", "defaultRenderLayer") + with lib.renderlayer(renderlayer): + for node in instance: + # get the connected objectSets of the node + sets = lib.get_related_sets(node) + if not sets: + continue + + # check if any objectSets are not present in the relationships + missing_sets = [s for s in sets if s not in relationships] + if missing_sets: + for missing_set in missing_sets: + cls.log.debug(missing_set) + + if '_SET' not in missing_set: + # A set of this node is not coming along, this is wrong! + cls.log.error("Missing sets '{}' for node " + "'{}'".format(missing_sets, node)) + invalid.append(node) + continue + + # Ensure the node is in the sets that are collected + for shader_set, data in relationships.items(): + if shader_set not in sets: + # no need to check for a set if the node + # isn't in it anyway + continue + + member_nodes = [member['name'] for member in + data['members']] + if node not in member_nodes: + # The node is not found in the collected set + # relationships + cls.log.error("Missing '{}' in collected set node " + "'{}'".format(node, shader_set)) + invalid.append(node) + + continue + + return invalid
diff --git a/client/ayon_maya/plugins/publish/validate_look_shading_group.py b/client/ayon_maya/plugins/publish/validate_look_shading_group.py new file mode 100644 index 00000000..25e4eb04 --- /dev/null +++ b/client/ayon_maya/plugins/publish/validate_look_shading_group.py @@ -0,0 +1,75 @@ +from maya import cmds + +import pyblish.api +import ayon_maya.api.action +from ayon_core.pipeline.publish import ( + RepairAction, + ValidateContentsOrder, + PublishValidationError, + OptionalPyblishPluginMixin +) + + +class ValidateShadingEngine(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): + """Validate all shading engines are named after the surface material.
+ + Shading engines should be named "{surface_shader}SG" + """ + + order = ValidateContentsOrder + families = ["look"] + hosts = ["maya"] + label = "Look Shading Engine Naming" + actions = [ + ayon_maya.api.action.SelectInvalidAction, RepairAction + ] + optional = True + + def process(self, instance): + if not self.is_active(instance.data): + return + + invalid = self.get_invalid(instance) + if invalid: + raise PublishValidationError( + "Found shading engines with incorrect naming:" + "\n{}".format(invalid) + ) + + @classmethod + def get_invalid(cls, instance): + shapes = cmds.ls(instance, type=["nurbsSurface", "mesh"], long=True) + invalid = [] + for shape in shapes: + shading_engines = cmds.listConnections( + shape, destination=True, type="shadingEngine" + ) or [] + for shading_engine in shading_engines: + materials = cmds.listConnections( + shading_engine + ".surfaceShader", + source=True, destination=False + ) + if not materials: + cls.log.warning( + "Shading engine '{}' has no material connected to its " + ".surfaceShader attribute.".format(shading_engine)) + continue + + material = materials[0] # there should only ever be one input + name = material + "SG" + if shading_engine != name: + invalid.append(shading_engine) + + return list(set(invalid)) + + @classmethod + def repair(cls, instance): + shading_engines = cls.get_invalid(instance) + for shading_engine in shading_engines: + name = ( + cmds.listConnections(shading_engine + ".surfaceShader")[0] + + "SG" + ) + cmds.rename(shading_engine, name)
diff --git a/client/ayon_maya/plugins/publish/validate_look_single_shader.py b/client/ayon_maya/plugins/publish/validate_look_single_shader.py new file mode 100644 index 00000000..de0a9619 --- /dev/null +++ b/client/ayon_maya/plugins/publish/validate_look_single_shader.py @@ -0,0 +1,59 @@ +import pyblish.api +from maya import cmds + +import ayon_maya.api.action +from ayon_core.pipeline.publish import ( + PublishValidationError, ValidateContentsOrder) + + +class ValidateSingleShader(pyblish.api.InstancePlugin): + """Validate all nurbsSurfaces and meshes have exactly one shader assigned. + + This will error if a shape has no shaders or more than one shader. + + """ + + order = ValidateContentsOrder + families = ['look'] + hosts = ['maya'] + label = 'Look Single Shader Per Shape' + actions = [ayon_maya.api.action.SelectInvalidAction] + + def process(self, instance): + + invalid = self.get_invalid(instance) + if invalid: + raise PublishValidationError( + ("Found shapes which don't have a single shader " + "assigned:\n{}").format(invalid)) + + @classmethod + def get_invalid(cls, instance): + + # Get all shapes from the instance + shapes = cmds.ls(instance, type=["nurbsSurface", "mesh"], long=True) + + # Check the number of connected shadingEngines per shape + no_shaders = [] + more_than_one_shaders = [] + for shape in shapes: + shading_engines = cmds.listConnections(shape, + destination=True, + type="shadingEngine") or [] + + # Only interested in unique shading engines.
+ shading_engines = list(set(shading_engines)) + + if not shading_engines: + no_shaders.append(shape) + elif len(shading_engines) > 1: + more_than_one_shaders.append(shape) + + if no_shaders: + cls.log.error("No shaders found on: {}".format(no_shaders)) + if more_than_one_shaders: + cls.log.error("More than one shader found on: " + "{}".format(more_than_one_shaders)) + + return no_shaders + more_than_one_shaders
diff --git a/client/ayon_maya/plugins/publish/validate_maya_units.py b/client/ayon_maya/plugins/publish/validate_maya_units.py new file mode 100644 index 00000000..8171e7e8 --- /dev/null +++ b/client/ayon_maya/plugins/publish/validate_maya_units.py @@ -0,0 +1,134 @@ +import maya.cmds as cmds + +import pyblish.api + +import ayon_maya.api.lib as mayalib +from ayon_core.pipeline.publish import ( + RepairContextAction, + ValidateSceneOrder, + PublishXmlValidationError, + OptionalPyblishPluginMixin +) + + +class ValidateMayaUnits(pyblish.api.ContextPlugin, + OptionalPyblishPluginMixin): + """Check if the Maya units are set correctly""" + + order = ValidateSceneOrder + label = "Maya Units" + hosts = ['maya'] + actions = [RepairContextAction] + + validate_linear_units = True + linear_units = "cm" + + validate_angular_units = True + angular_units = "deg" + + validate_fps = True + + nice_message_format = ( + "- {setting} must be {required_value}. " + "Your scene is set to {current_value}" + ) + log_message_format = ( + "Maya scene {setting} must be '{required_value}'. " + "Current value is '{current_value}'." + ) + optional = False + + @classmethod + def apply_settings(cls, project_settings): + """Apply project settings to creator""" + settings = ( + project_settings["maya"]["publish"]["ValidateMayaUnits"] + ) + + cls.validate_linear_units = settings.get("validate_linear_units", + cls.validate_linear_units) + cls.linear_units = settings.get("linear_units", cls.linear_units) + cls.validate_angular_units = settings.get("validate_angular_units", + cls.validate_angular_units) + cls.angular_units = settings.get("angular_units", cls.angular_units) + cls.validate_fps = settings.get("validate_fps", cls.validate_fps) + + def process(self, context): + if not self.is_active(context.data): + return + # Collected units + linearunits = context.data.get('linearUnits') + angularunits = context.data.get('angularUnits') + + fps = context.data.get('fps') + + folder_attributes = context.data["folderEntity"]["attrib"] + folder_fps = mayalib.convert_to_maya_fps(folder_attributes["fps"]) + + self.log.info('Units (linear): {0}'.format(linearunits)) + self.log.info('Units (angular): {0}'.format(angularunits)) + self.log.info('Units (time): {0} FPS'.format(fps)) + + invalid = [] + + # Check if units are correct + if ( + self.validate_linear_units + and linearunits + and linearunits != self.linear_units + ): + invalid.append({ + "setting": "Linear units", + "required_value": self.linear_units, + "current_value": linearunits + }) + + if ( + self.validate_angular_units + and angularunits + and angularunits != self.angular_units + ): + invalid.append({ + "setting": "Angular units", + "required_value": self.angular_units, + "current_value": angularunits + }) + + if self.validate_fps and fps and fps != folder_fps: + invalid.append({ + "setting": "FPS", + "required_value": folder_fps, + "current_value": fps + }) + + if invalid: + + issues = [] + for data in invalid: + self.log.error(self.log_message_format.format(**data)) + issues.append(self.nice_message_format.format(**data)) + issues = "\n".join(issues)
+ + raise PublishXmlValidationError( + plugin=self, + message="Invalid maya scene units", + formatting_data={"issues": issues} + ) + + @classmethod + def repair(cls, context): + """Repair the scene's angular, linear and time units to match the + validator settings and the project FPS.""" + + cls.log.info("Setting angular unit to '{}'".format(cls.angular_units)) + cmds.currentUnit(angle=cls.angular_units) + current_angle = cmds.currentUnit(query=True, angle=True) + cls.log.debug(current_angle) + + cls.log.info("Setting linear unit to '{}'".format(cls.linear_units)) + cmds.currentUnit(linear=cls.linear_units) + current_linear = cmds.currentUnit(query=True, linear=True) + cls.log.debug(current_linear) + + cls.log.info("Setting time unit to match project") + folder_entity = context.data["folderEntity"] + mayalib.set_scene_fps(folder_entity["attrib"]["fps"])
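
The repair above drives everything through `cmds.currentUnit`, which both queries and sets the scene units. A minimal sketch of the same calls:

    from maya import cmds

    print(cmds.currentUnit(query=True, linear=True))  # e.g. 'cm'
    print(cmds.currentUnit(query=True, angle=True))   # e.g. 'deg'

    cmds.currentUnit(linear="cm", angle="deg")  # set both units
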
diff --git a/client/ayon_maya/plugins/publish/validate_mesh_arnold_attributes.py b/client/ayon_maya/plugins/publish/validate_mesh_arnold_attributes.py new file mode 100644 index 00000000..5a9841b6 --- /dev/null +++ b/client/ayon_maya/plugins/publish/validate_mesh_arnold_attributes.py @@ -0,0 +1,127 @@ +from maya import cmds +import pyblish.api + +import ayon_maya.api.action +from ayon_maya.api.lib import ( + maintained_selection, + delete_after, + undo_chunk, + get_attribute, + set_attribute +) +from ayon_core.pipeline.publish import ( + OptionalPyblishPluginMixin, + RepairAction, + ValidateMeshOrder, + PublishValidationError +) + + +class ValidateMeshArnoldAttributes(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): + """Validate the mesh has default Arnold attributes. + + It compares all Arnold attributes from a default mesh. This is to ensure + later published looks can discover non-default Arnold attributes. + """ + + order = ValidateMeshOrder + hosts = ["maya"] + families = ["model"] + label = "Mesh Arnold Attributes" + actions = [ + ayon_maya.api.action.SelectInvalidAction, + RepairAction + ] + + optional = True + + # cache (will be `dict` when cached) + arnold_mesh_defaults = None + + @classmethod + def get_default_attributes(cls): + + if cls.arnold_mesh_defaults is not None: + # Use from cache + return cls.arnold_mesh_defaults + + # Get default arnold attribute values for mesh type. + defaults = {} + with delete_after() as tmp: + transform = cmds.createNode("transform", skipSelect=True) + tmp.append(transform) + + mesh = cmds.createNode("mesh", parent=transform, skipSelect=True) + arnold_attributes = cmds.listAttr(mesh, + string="ai*", + fromPlugin=True) or [] + for attr in arnold_attributes: + plug = "{}.{}".format(mesh, attr) + try: + defaults[attr] = get_attribute(plug) + except PublishValidationError: + cls.log.debug("Ignoring arnold attribute: {}".format(attr)) + + cls.arnold_mesh_defaults = defaults # assign cache + return defaults + + @classmethod + def get_invalid_attributes(cls, instance, compute=False): + invalid = [] + + if compute: + + meshes = cmds.ls(instance, type="mesh", long=True) + if not meshes: + return [] + + # Compare the values against the defaults + defaults = cls.get_default_attributes() + for mesh in meshes: + for attr_name, default_value in defaults.items(): + plug = "{}.{}".format(mesh, attr_name) + if get_attribute(plug) != default_value: + invalid.append(plug) + + instance.data["nondefault_arnold_attributes"] = invalid + + return instance.data.get("nondefault_arnold_attributes", []) + + @classmethod + def get_invalid(cls, instance): + invalid_attrs = cls.get_invalid_attributes(instance, compute=False) + invalid_nodes = set(attr.split(".", 1)[0] for attr in invalid_attrs) + return sorted(invalid_nodes) + + @classmethod + def repair(cls, instance): + with maintained_selection(): + with undo_chunk(): + defaults = cls.get_default_attributes() + attributes = cls.get_invalid_attributes( + instance, compute=False + ) + for attr in attributes: + node, attr_name = attr.split(".", 1) + value = defaults[attr_name] + set_attribute( + node=node, + attribute=attr_name, + value=value + ) + + def process(self, instance): + if not self.is_active(instance.data): + return + + if not cmds.pluginInfo("mtoa", query=True, loaded=True): + # Arnold attributes only exist if plug-in is loaded + return + + invalid = self.get_invalid_attributes(instance, compute=True) + if invalid: + raise PublishValidationError( + "Non-default Arnold attributes found in instance:" + " {0}".format(invalid) + )
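
The defaults cache above can be reproduced in isolation: make a throwaway mesh, record its `ai*` attribute values, then delete it. A sketch assuming the `mtoa` (Arnold) plug-in is installed; the probe node name is made up:

    from maya import cmds

    cmds.loadPlugin("mtoa", quiet=True)  # assumes Arnold is installed
    mesh = cmds.createNode("mesh", name="defaultsProbeShape")

    defaults = {}
    for attr in cmds.listAttr(mesh, string="ai*", fromPlugin=True) or []:
        try:
            defaults[attr] = cmds.getAttr("{}.{}".format(mesh, attr))
        except RuntimeError:
            pass  # some compound/message attributes cannot be read

    cmds.delete(cmds.listRelatives(mesh, parent=True))
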
diff --git a/client/ayon_maya/plugins/publish/validate_mesh_empty.py b/client/ayon_maya/plugins/publish/validate_mesh_empty.py new file mode 100644 index 00000000..ad2b08ba --- /dev/null +++ b/client/ayon_maya/plugins/publish/validate_mesh_empty.py @@ -0,0 +1,55 @@ +from maya import cmds + +import pyblish.api +import ayon_maya.api.action +from ayon_core.pipeline.publish import ( + RepairAction, + ValidateMeshOrder, + PublishValidationError +) + + +class ValidateMeshEmpty(pyblish.api.InstancePlugin): + """Validate meshes have some vertices. + + It's possible to have meshes without any vertices. To replicate + this issue, delete all faces/polygons and then all edges. + """ + + order = ValidateMeshOrder + hosts = ["maya"] + families = ["model"] + label = "Mesh Empty" + actions = [ + ayon_maya.api.action.SelectInvalidAction, RepairAction + ] + + @classmethod + def repair(cls, instance): + invalid = cls.get_invalid(instance) + for node in invalid: + cmds.delete(node) + + @classmethod + def get_invalid(cls, instance): + invalid = [] + + meshes = cmds.ls(instance, type="mesh", long=True) + for mesh in meshes: + num_vertices = cmds.polyEvaluate(mesh, vertex=True) + + if num_vertices == 0: + cls.log.warning( + "\"{}\" does not have any vertices.".format(mesh) + ) + invalid.append(mesh) + + return invalid + + def process(self, instance): + + invalid = self.get_invalid(instance) + if invalid: + raise PublishValidationError( + "Meshes found without any vertices: %s" % invalid + )
diff --git a/client/ayon_maya/plugins/publish/validate_mesh_has_uv.py b/client/ayon_maya/plugins/publish/validate_mesh_has_uv.py new file mode 100644 index 00000000..25da24db --- /dev/null +++ b/client/ayon_maya/plugins/publish/validate_mesh_has_uv.py @@ -0,0 +1,89 @@ +from maya import cmds + +import pyblish.api +import ayon_maya.api.action +from ayon_core.pipeline.publish import ( + ValidateMeshOrder, + OptionalPyblishPluginMixin, + PublishValidationError +) +from ayon_maya.api.lib import len_flattened + + +class ValidateMeshHasUVs(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): + """Validate the current mesh has UVs. + + It validates whether the current UV set has non-zero UVs and + at least as many UVs as vertices. It's not really bulletproof, + but a simple quick validation to check if there are likely + UVs for every face. + """ + + order = ValidateMeshOrder + hosts = ['maya'] + families = ['model'] + label = 'Mesh Has UVs' + actions = [ayon_maya.api.action.SelectInvalidAction] + optional = True + + @classmethod + def get_invalid(cls, instance): + invalid = [] + + for node in cmds.ls(instance, type='mesh'): + num_vertices = cmds.polyEvaluate(node, vertex=True) + + if num_vertices == 0: + cls.log.warning( + "Skipping \"{}\", cause it does not have any " + "vertices.".format(node) + ) + continue + + uv = cmds.polyEvaluate(node, uv=True) + + if uv == 0: + invalid.append(node) + continue + + vertex = cmds.polyEvaluate(node, vertex=True) + if uv < vertex: + # Workaround: + # Maya can have instanced UVs in a single mesh, for example + # imported from an Alembic. With instanced UVs the UV count + # from `maya.cmds.polyEvaluate(uv=True)` will only result in + # the unique UV count instead of for all vertices. + # + # Note: Maya can save instanced UVs to `mayaAscii` but cannot + # load this as instanced. So saving, opening and saving + # again will lose this information. + map_attr = "{}.map[*]".format(node) + uv_to_vertex = cmds.polyListComponentConversion(map_attr, + toVertex=True) + uv_vertex_count = len_flattened(uv_to_vertex) + if uv_vertex_count < vertex: + invalid.append(node) + else: + cls.log.warning("Node has instanced UV points: " + "{0}".format(node)) + + return invalid + + def process(self, instance): + if not self.is_active(instance.data): + return + + invalid = self.get_invalid(instance) + if invalid: + + names = "\n".join(
".join( + " - {}".format(node) for node in invalid + ) + + raise PublishValidationError( + title="Mesh has missing UVs", + message="Model meshes are required to have UVs.

" + "Meshes detected with invalid or missing UVs:
" + "{0}".format(names) + ) diff --git a/client/ayon_maya/plugins/publish/validate_mesh_lamina_faces.py b/client/ayon_maya/plugins/publish/validate_mesh_lamina_faces.py new file mode 100644 index 00000000..ee6acd16 --- /dev/null +++ b/client/ayon_maya/plugins/publish/validate_mesh_lamina_faces.py @@ -0,0 +1,55 @@ +from maya import cmds + +import pyblish.api +import ayon_maya.api.action +from ayon_core.pipeline.publish import ( + ValidateMeshOrder, + OptionalPyblishPluginMixin, + PublishValidationError +) + + +class ValidateMeshLaminaFaces(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): + """Validate meshes don't have lamina faces. + + Lamina faces share all of their edges. + + """ + + order = ValidateMeshOrder + hosts = ['maya'] + families = ['model'] + label = 'Mesh Lamina Faces' + actions = [ayon_maya.api.action.SelectInvalidAction] + optional = True + + description = ( + "## Meshes with Lamina Faces\n" + "Detected meshes with lamina faces. Lamina faces are faces " + "that share all of their edges and thus are merged together on top of " + "each other.\n\n" + "### How to repair?\n" + "You can repair them by using Maya's modeling tool `Mesh > Cleanup..` " + "and select to cleanup matching polygons for lamina faces." + ) + + @staticmethod + def get_invalid(instance): + meshes = cmds.ls(instance, type='mesh', long=True) + invalid = [mesh for mesh in meshes if + cmds.polyInfo(mesh, laminaFaces=True)] + + return invalid + + def process(self, instance): + """Process all the nodes in the instance 'objectSet'""" + if not self.is_active(instance.data): + return + + invalid = self.get_invalid(instance) + + if invalid: + raise PublishValidationError( + "Meshes found with lamina faces: {0}".format(invalid), + description=self.description) diff --git a/client/ayon_maya/plugins/publish/validate_mesh_ngons.py b/client/ayon_maya/plugins/publish/validate_mesh_ngons.py new file mode 100644 index 00000000..f4d90763 --- /dev/null +++ b/client/ayon_maya/plugins/publish/validate_mesh_ngons.py @@ -0,0 +1,69 @@ +from maya import cmds + +import pyblish.api +import ayon_maya.api.action +from ayon_maya.api import lib +from ayon_core.pipeline.publish import ( + ValidateContentsOrder, + OptionalPyblishPluginMixin, + PublishValidationError +) + + +class ValidateMeshNgons(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): + """Ensure that meshes don't have ngons + + Ngon are faces with more than 4 sides. + + To debug the problem on the meshes you can use Maya's modeling + tool: "Mesh > Cleanup..." + + """ + + order = ValidateContentsOrder + hosts = ["maya"] + families = ["model"] + label = "Mesh ngons" + actions = [ayon_maya.api.action.SelectInvalidAction] + optional = True + + description = ( + "## Meshes with NGONs Faces\n" + "Detected meshes with NGON faces. **NGONS** are faces that " + "with more than four sides.\n\n" + "### How to repair?\n" + "You can repair them by usings Maya's modeling tool Mesh > Cleanup.. " + "and select to cleanup matching polygons for lamina faces." + ) + + @staticmethod + def get_invalid(instance): + + meshes = cmds.ls(instance, type='mesh', long=True) + + # Get all faces + faces = ['{0}.f[*]'.format(node) for node in meshes] + + # Skip meshes that for some reason have no faces, e.g. 
empty meshes + faces = cmds.ls(faces) + if not faces: + return [] + + # Filter to n-sided polygon faces (ngons) + invalid = lib.polyConstraint(faces, + t=0x0008, # type=face + size=3) # size=nsided + + return invalid + + def process(self, instance): + """Process all the nodes in the instance "objectSet""" + if not self.is_active(instance.data): + return + + invalid = self.get_invalid(instance) + if invalid: + raise PublishValidationError( + "Meshes found with n-gons: {0}".format(invalid), + description=self.description) diff --git a/client/ayon_maya/plugins/publish/validate_mesh_no_negative_scale.py b/client/ayon_maya/plugins/publish/validate_mesh_no_negative_scale.py new file mode 100644 index 00000000..7868015a --- /dev/null +++ b/client/ayon_maya/plugins/publish/validate_mesh_no_negative_scale.py @@ -0,0 +1,68 @@ +from maya import cmds + +import pyblish.api +import ayon_maya.api.action +from ayon_core.pipeline.publish import ( + ValidateMeshOrder, + PublishValidationError, + OptionalPyblishPluginMixin +) + + +def _as_report_list(values, prefix="- ", suffix="\n"): + """Return list as bullet point list for a report""" + if not values: + return "" + return prefix + (suffix + prefix).join(values) + + +class ValidateMeshNoNegativeScale(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): + """Ensure that meshes don't have a negative scale. + + Using negatively scaled proxies in a VRayMesh results in inverted + normals. As such we want to avoid this. + + We also avoid this on the rig or model because these are often the + previous steps for those that are cached to proxies so we can catch this + issue early. + + """ + + order = ValidateMeshOrder + hosts = ['maya'] + families = ['model'] + label = 'Mesh No Negative Scale' + actions = [ayon_maya.api.action.SelectInvalidAction] + optional = False + + @staticmethod + def get_invalid(instance): + meshes = cmds.ls(instance, + type='mesh', + long=True, + noIntermediate=True) + + invalid = [] + for mesh in meshes: + transform = cmds.listRelatives(mesh, parent=True, fullPath=True)[0] + scale = cmds.getAttr("{0}.scale".format(transform))[0] + + if any(x < 0 for x in scale): + invalid.append(mesh) + + return invalid + + def process(self, instance): + """Process all the nodes in the instance 'objectSet'""" + if not self.is_active(instance.data): + return + invalid = self.get_invalid(instance) + + if invalid: + raise PublishValidationError( + "Meshes found with negative scale:\n\n{0}".format( + _as_report_list(sorted(invalid)) + ), + title="Negative scale" + ) diff --git a/client/ayon_maya/plugins/publish/validate_mesh_non_manifold.py b/client/ayon_maya/plugins/publish/validate_mesh_non_manifold.py new file mode 100644 index 00000000..dc561be9 --- /dev/null +++ b/client/ayon_maya/plugins/publish/validate_mesh_non_manifold.py @@ -0,0 +1,170 @@ +from maya import cmds, mel + +import pyblish.api +import ayon_maya.api.action +from ayon_core.pipeline.publish import ( + ValidateMeshOrder, + PublishXmlValidationError, + RepairAction, + OptionalPyblishPluginMixin +) + + +def poly_cleanup(version=4, + meshes=None, + # Version 1 + all_meshes=False, + select_only=False, + history_on=True, + quads=False, + nsided=False, + concave=False, + holed=False, + nonplanar=False, + zeroGeom=False, + zeroGeomTolerance=1e-05, + zeroEdge=False, + zeroEdgeTolerance=1e-05, + zeroMap=False, + zeroMapTolerance=1e-05, + # Version 2 + shared_uvs=False, + non_manifold=False, + # Version 3 + lamina=False, + # Version 4 + invalid_components=False): + """Wrapper around 
`polyCleanupArgList` mel command""" + + # Get all inputs named as `dict` to easily do conversions and formatting + values = locals() + + # Convert booleans to 1 or 0 + for key in [ + "all_meshes", + "select_only", + "history_on", + "quads", + "nsided", + "concave", + "holed", + "nonplanar", + "zeroGeom", + "zeroEdge", + "zeroMap", + "shared_uvs", + "non_manifold", + "lamina", + "invalid_components", + ]: + values[key] = 1 if values[key] else 0 + + cmd = ( + 'polyCleanupArgList {version} {{ ' + '"{all_meshes}",' # 0: All selectable meshes + '"{select_only}",' # 1: Only perform a selection + '"{history_on}",' # 2: Keep construction history + '"{quads}",' # 3: Check for quads polys + '"{nsided}",' # 4: Check for n-sides polys + '"{concave}",' # 5: Check for concave polys + '"{holed}",' # 6: Check for holed polys + '"{nonplanar}",' # 7: Check for non-planar polys + '"{zeroGeom}",' # 8: Check for 0 area faces + '"{zeroGeomTolerance}",' # 9: Tolerance for face areas + '"{zeroEdge}",' # 10: Check for 0 length edges + '"{zeroEdgeTolerance}",' # 11: Tolerance for edge length + '"{zeroMap}",' # 12: Check for 0 uv face area + '"{zeroMapTolerance}",' # 13: Tolerance for uv face areas + '"{shared_uvs}",' # 14: Unshare uvs that are shared + # across vertices + '"{non_manifold}",' # 15: Check for nonmanifold polys + '"{lamina}",' # 16: Check for lamina polys + '"{invalid_components}"' # 17: Remove invalid components + ' }};'.format(**values) + ) + + mel.eval("source polyCleanupArgList") + if not all_meshes and meshes: + # Allow to specify meshes to run over by selecting them + cmds.select(meshes, replace=True) + mel.eval(cmd) + + +class CleanupMatchingPolygons(RepairAction): + label = "Cleanup matching polygons" + + +def _as_report_list(values, prefix="- ", suffix="\n"): + """Return list as bullet point list for a report""" + if not values: + return "" + return prefix + (suffix + prefix).join(values) + + +class ValidateMeshNonManifold(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): + """Ensure that meshes don't have non-manifold edges or vertices + + To debug the problem on the meshes you can use Maya's modeling + tool: "Mesh > Cleanup..." 
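+
+    Repairing runs the `poly_cleanup` wrapper above with `select_only=True`
+    and `non_manifold=True`, so matching components are only selected for
+    inspection and nothing is deleted automatically.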
+ + """ + + order = ValidateMeshOrder + hosts = ['maya'] + families = ['model'] + label = 'Mesh Non-Manifold Edges/Vertices' + actions = [ayon_maya.api.action.SelectInvalidAction, + CleanupMatchingPolygons] + optional = True + + @staticmethod + def get_invalid(instance): + + meshes = cmds.ls(instance, type='mesh', long=True) + + invalid = [] + for mesh in meshes: + components = cmds.polyInfo(mesh, + nonManifoldVertices=True, + nonManifoldEdges=True) + if components: + invalid.extend(components) + + return invalid + + def process(self, instance): + """Process all the nodes in the instance 'objectSet'""" + if not self.is_active(instance.data): + return + + invalid = self.get_invalid(instance) + + if invalid: + # Report only the meshes instead of all component indices + invalid_meshes = { + component.split(".", 1)[0] for component in invalid + } + invalid_meshes = _as_report_list(sorted(invalid_meshes)) + + raise PublishXmlValidationError( + plugin=self, + message=( + "Meshes found with non-manifold " + "edges/vertices:\n\n{0}".format(invalid_meshes) + ) + ) + + @classmethod + def repair(cls, instance): + invalid_components = cls.get_invalid(instance) + if not invalid_components: + cls.log.info("No invalid components found to cleanup.") + return + + invalid_meshes = { + component.split(".", 1)[0] for component in invalid_components + } + poly_cleanup(meshes=list(invalid_meshes), + select_only=True, + non_manifold=True) diff --git a/client/ayon_maya/plugins/publish/validate_mesh_non_zero_edge.py b/client/ayon_maya/plugins/publish/validate_mesh_non_zero_edge.py new file mode 100644 index 00000000..7830e10f --- /dev/null +++ b/client/ayon_maya/plugins/publish/validate_mesh_non_zero_edge.py @@ -0,0 +1,83 @@ +from maya import cmds + +import pyblish.api +import ayon_maya.api.action +from ayon_maya.api import lib +from ayon_core.pipeline.publish import ( + ValidateMeshOrder, + OptionalPyblishPluginMixin, + PublishValidationError +) + + +class ValidateMeshNonZeroEdgeLength(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): + """Validate meshes don't have edges with a zero length. + + Based on Maya's polyCleanup 'Edges with zero length'. + + Note: + This can be slow for high-res meshes. + + """ + + order = ValidateMeshOrder + families = ['model'] + hosts = ['maya'] + label = 'Mesh Edge Length Non Zero' + actions = [ayon_maya.api.action.SelectInvalidAction] + optional = True + + __tolerance = 1e-5 + + @classmethod + def get_invalid(cls, instance): + """Return the invalid edges. 
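+
+        Zero length edges are matched with `lib.polyConstraint` using an
+        edge length bound of (0, tolerance), with the tolerance set to 1e-5.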
+
+        Also see:
+
+        http://help.autodesk.com/view/MAYAUL/2015/ENU/?guid=Mesh__Cleanup
+
+        """
+
+        meshes = cmds.ls(instance, type='mesh', long=True)
+        if not meshes:
+            return list()
+
+        valid_meshes = []
+        for mesh in meshes:
+            num_vertices = cmds.polyEvaluate(mesh, vertex=True)
+
+            if num_vertices == 0:
+                cls.log.warning(
+                    "Skipping \"{}\", because it does not have any "
+                    "vertices.".format(mesh)
+                )
+                continue
+
+            valid_meshes.append(mesh)
+
+        # Get all edges
+        edges = ['{0}.e[*]'.format(node) for node in valid_meshes]
+
+        # Filter by constraint on edge length
+        invalid = lib.polyConstraint(edges,
+                                     t=0x8000,  # type=edge
+                                     length=1,
+                                     lengthbound=(0, cls.__tolerance))
+
+        return invalid
+
+    def process(self, instance):
+        """Process all meshes"""
+        if not self.is_active(instance.data):
+            return
+
+        invalid = self.get_invalid(instance)
+        if invalid:
+            label = "Meshes found with zero edge length"
+            raise PublishValidationError(
+                message="{}: {}".format(label, invalid),
+                title=label,
+                description="{}:\n- ".format(label) + "\n- ".join(invalid)
+            )
diff --git a/client/ayon_maya/plugins/publish/validate_mesh_normals_unlocked.py b/client/ayon_maya/plugins/publish/validate_mesh_normals_unlocked.py
new file mode 100644
index 00000000..2d837016
--- /dev/null
+++ b/client/ayon_maya/plugins/publish/validate_mesh_normals_unlocked.py
@@ -0,0 +1,78 @@
+from maya import cmds
+import maya.api.OpenMaya as om2
+
+import pyblish.api
+import ayon_maya.api.action
+from ayon_core.pipeline.publish import (
+    RepairAction,
+    ValidateMeshOrder,
+    OptionalPyblishPluginMixin,
+    PublishValidationError
+)
+
+
+def _as_report_list(values, prefix="- ", suffix="\n"):
+    """Return list as bullet point list for a report"""
+    if not values:
+        return ""
+    return prefix + (suffix + prefix).join(values)
+
+
+class ValidateMeshNormalsUnlocked(pyblish.api.InstancePlugin,
+                                  OptionalPyblishPluginMixin):
+    """Validate all meshes in the instance have unlocked normals
+
+    These can be unlocked manually through:
+        Modeling > Mesh Display > Unlock Normals
+
+    """
+
+    order = ValidateMeshOrder
+    hosts = ['maya']
+    families = ['model']
+    label = 'Mesh Normals Unlocked'
+    actions = [ayon_maya.api.action.SelectInvalidAction,
+               RepairAction]
+    optional = True
+
+    @staticmethod
+    def has_locked_normals(mesh):
+        """Return whether mesh has at least one locked normal"""
+
+        sel = om2.MGlobal.getSelectionListByName(mesh)
+        node = sel.getDependNode(0)
+        fn_mesh = om2.MFnMesh(node)
+        _, normal_ids = fn_mesh.getNormalIds()
+        for normal_id in normal_ids:
+            if fn_mesh.isNormalLocked(normal_id):
+                return True
+        return False
+
+    @classmethod
+    def get_invalid(cls, instance):
+        """Return the meshes with locked normals in instance"""
+
+        meshes = cmds.ls(instance, type='mesh', long=True)
+        return [mesh for mesh in meshes if cls.has_locked_normals(mesh)]
+
+    def process(self, instance):
+        """Raise invalid when any of the meshes have locked normals"""
+        if not self.is_active(instance.data):
+            return
+
+        invalid = self.get_invalid(instance)
+
+        if invalid:
+            raise PublishValidationError(
+                "Meshes found with locked normals:\n\n{0}".format(
+                    _as_report_list(sorted(invalid))
+                ),
+                title="Locked normals"
+            )
+
+    @classmethod
+    def repair(cls, instance):
+        """Unlocks all normals on the meshes in this instance."""
+        invalid = cls.get_invalid(instance)
+        for mesh in invalid:
+            cmds.polyNormalPerVertex(mesh, unFreezeNormal=True)
diff --git a/client/ayon_maya/plugins/publish/validate_mesh_overlapping_uvs.py b/client/ayon_maya/plugins/publish/validate_mesh_overlapping_uvs.py
new file mode 100644
index 00000000..9606ec32
--- /dev/null
+++ b/client/ayon_maya/plugins/publish/validate_mesh_overlapping_uvs.py
@@ -0,0 +1,306 @@
+import math
+from six.moves import xrange
+
+from maya import cmds
+import maya.api.OpenMaya as om
+import pyblish.api
+
+import ayon_maya.api.action
+from ayon_core.pipeline.publish import (
+    ValidateMeshOrder,
+    OptionalPyblishPluginMixin,
+    PublishValidationError
+)
+
+
+def _as_report_list(values, prefix="- ", suffix="\n"):
+    """Return list as bullet point list for a report"""
+    if not values:
+        return ""
+    return prefix + (suffix + prefix).join(values)
+
+
+class GetOverlappingUVs(object):
+
+    def _createBoundingCircle(self, meshfn):
+        """ Represent a face by center and radius
+
+        :param meshfn: MFnMesh class
+        :type meshfn: :class:`maya.api.OpenMaya.MFnMesh`
+        :returns: (center, radius)
+        :rtype: tuple
+        """
+        center = []
+        radius = []
+        for i in xrange(meshfn.numPolygons):  # noqa: F821
+            # get uvs from face
+            uarray = []
+            varray = []
+            for j in range(len(meshfn.getPolygonVertices(i))):
+                uv = meshfn.getPolygonUV(i, j)
+                uarray.append(uv[0])
+                varray.append(uv[1])
+
+            # average the uvs to find the face center
+            cu = 0.0
+            cv = 0.0
+            for j in range(len(uarray)):
+                cu += uarray[j]
+                cv += varray[j]
+
+            cu /= len(uarray)
+            cv /= len(varray)
+            rsqr = 0.0
+            for j in range(len(varray)):
+                du = uarray[j] - cu
+                dv = varray[j] - cv
+                dsqr = du * du + dv * dv
+                rsqr = dsqr if dsqr > rsqr else rsqr
+
+            center.append(cu)
+            center.append(cv)
+            radius.append(math.sqrt(rsqr))
+
+        return center, radius
+
+    def _createRayGivenFace(self, meshfn, faceId):
+        """ Represent a face by a series of edges(rays), i.e.
+
+        :param meshfn: MFnMesh class
+        :type meshfn: :class:`maya.api.OpenMaya.MFnMesh`
+        :param faceId: face id
+        :type faceId: int
+        :returns: ``(True, orig, vec)``, or ``(False, None, None)``
+                  if the face has no valid uvs.
+        :rtype: tuple
+
+        .. code-block:: python
+
+            orig = [orig1u, orig1v, orig2u, orig2v, ... ]
+            vec = [vec1u, vec1v, vec2u, vec2v, ... ]
+        """
+        orig = []
+        vec = []
+        # get uvs
+        uarray = []
+        varray = []
+        for i in range(len(meshfn.getPolygonVertices(faceId))):
+            uv = meshfn.getPolygonUV(faceId, i)
+            uarray.append(uv[0])
+            varray.append(uv[1])
+
+        if len(uarray) == 0 or len(varray) == 0:
+            return (False, None, None)
+
+        # loop through all vertices to construct edges/rays
+        u = uarray[-1]
+        v = varray[-1]
+        for i in xrange(len(uarray)):  # noqa: F821
+            orig.append(uarray[i])
+            orig.append(varray[i])
+            vec.append(u - uarray[i])
+            vec.append(v - varray[i])
+            u = uarray[i]
+            v = varray[i]
+
+        return (True, orig, vec)
+
+    def _checkCrossingEdges(self,
+                            face1Orig,
+                            face1Vec,
+                            face2Orig,
+                            face2Vec):
+        """ Check if there are crossing edges between two faces.
+        Return True if there are crossing edges and False otherwise.
+
+        :param face1Orig: origin of face 1
+        :type face1Orig: tuple
+        :param face1Vec: face 1 edges
+        :type face1Vec: list
+        :param face2Orig: origin of face 2
+        :type face2Orig: tuple
+        :param face2Vec: face 2 edges
+        :type face2Vec: list
+
+        A face is represented by a series of edges(rays), i.e.
+        .. code-block:: python
+
+            faceOrig[] = [orig1u, orig1v, orig2u, orig2v, ... ]
+            faceVec[] = [vec1u, vec1v, vec2u, vec2v, ... ]
+        """
+        face1Size = len(face1Orig)
+        face2Size = len(face2Orig)
+        for i in xrange(0, face1Size, 2):  # noqa: F821
+            o1x = face1Orig[i]
+            o1y = face1Orig[i+1]
+            v1x = face1Vec[i]
+            v1y = face1Vec[i+1]
+            n1x = v1y
+            n1y = -v1x
+            for j in xrange(0, face2Size, 2):  # noqa: F821
+                # Given ray1(O1, V1) and ray2(O2, V2)
+                # Normal of ray1 is (V1.y, -V1.x)
+                o2x = face2Orig[j]
+                o2y = face2Orig[j+1]
+                v2x = face2Vec[j]
+                v2y = face2Vec[j+1]
+                n2x = v2y
+                n2y = -v2x
+
+                # Find t for ray2
+                # t = [(o1x-o2x)n1x + (o1y-o2y)n1y] /
+                #     (v2x * n1x + v2y * n1y)
+                denum = v2x * n1x + v2y * n1y
+                # Edges are parallel if denum is close to 0.
+                if math.fabs(denum) < 0.000001:
+                    continue
+                t2 = ((o1x-o2x) * n1x + (o1y-o2y) * n1y) / denum
+                if (t2 < 0.00001 or t2 > 0.99999):
+                    continue
+
+                # Find t for ray1
+                # t = [(o2x-o1x)n2x
+                #     + (o2y-o1y)n2y] / (v1x * n2x + v1y * n2y)
+                denum = v1x * n2x + v1y * n2y
+                # Edges are parallel if denum is close to 0.
+                if math.fabs(denum) < 0.000001:
+                    continue
+                t1 = ((o2x-o1x) * n2x + (o2y-o1y) * n2y) / denum
+
+                # Edges intersect
+                if (t1 > 0.00001 and t1 < 0.99999):
+                    return 1
+
+        return 0
+
+    def _getOverlapUVFaces(self, meshName):
+        """ Return overlapping faces
+
+        :param meshName: name of mesh
+        :type meshName: str
+        :returns: list of overlapping faces
+        :rtype: list
+        """
+        faces = []
+        # find polygon mesh node
+        selList = om.MSelectionList()
+        selList.add(meshName)
+        mesh = selList.getDependNode(0)
+        if mesh.apiType() == om.MFn.kTransform:
+            dagPath = selList.getDagPath(0)
+            dagFn = om.MFnDagNode(dagPath)
+            child = dagFn.child(0)
+            if child.apiType() != om.MFn.kMesh:
+                raise Exception("Can't find polygon mesh")
+            mesh = child
+        meshfn = om.MFnMesh(mesh)
+
+        center, radius = self._createBoundingCircle(meshfn)
+        for i in xrange(meshfn.numPolygons):  # noqa: F821
+            rayb1, face1Orig, face1Vec = self._createRayGivenFace(meshfn, i)
+            if not rayb1:
+                continue
+            cui = center[2*i]
+            cvi = center[2*i+1]
+            ri = radius[i]
+            # Exclude the degenerate face
+            # if(area(face1Orig) < 0.000001) continue;
+            # Loop through face j where j != i
+            for j in range(i+1, meshfn.numPolygons):
+                cuj = center[2*j]
+                cvj = center[2*j+1]
+                rj = radius[j]
+                du = cuj - cui
+                dv = cvj - cvi
+                dsqr = du * du + dv * dv
+                # Quick rejection if bounding circles don't overlap
+                if (dsqr >= (ri + rj) * (ri + rj)):
+                    continue
+
+                rayb2, face2Orig, face2Vec = self._createRayGivenFace(meshfn,
+                                                                      j)
+                if not rayb2:
+                    continue
+                # Exclude the degenerate face
+                # if(area(face2Orig) < 0.000001): continue;
+                if self._checkCrossingEdges(face1Orig,
+                                            face1Vec,
+                                            face2Orig,
+                                            face2Vec):
+                    face1 = '%s.f[%d]' % (meshfn.name(), i)
+                    face2 = '%s.f[%d]' % (meshfn.name(), j)
+                    if face1 not in faces:
+                        faces.append(face1)
+                    if face2 not in faces:
+                        faces.append(face2)
+        return faces
+
+
+class ValidateMeshHasOverlappingUVs(pyblish.api.InstancePlugin,
+                                    OptionalPyblishPluginMixin):
+    """ Validate the current mesh overlapping UVs.
+
+    It validates whether the current UVs are overlapping or not.
+    It is optional, so it can be used to just warn the publisher about it.
+    """
+
+    order = ValidateMeshOrder
+    hosts = ['maya']
+    families = ['model']
+    label = 'Mesh Has Overlapping UVs'
+    actions = [ayon_maya.api.action.SelectInvalidAction]
+    optional = True

+    @classmethod
+    def _get_overlapping_uvs(cls, mesh):
+        """Return overlapping UVs of mesh.
+
+        Args:
+            mesh (str): Mesh node name
+
+        Returns:
+            list: Overlapping uvs for the input mesh in all uv sets.
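+
+            Note: the check runs once per UV set; the current UV set is
+            switched for each pass and restored to the original afterwards.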
+
+        """
+        ovl = GetOverlappingUVs()
+
+        # Store original uv set
+        original_current_uv_set = cmds.polyUVSet(mesh,
+                                                 query=True,
+                                                 currentUVSet=True)[0]
+
+        overlapping_faces = []
+        for uv_set in cmds.polyUVSet(mesh, query=True, allUVSets=True):
+            cmds.polyUVSet(mesh, currentUVSet=True, uvSet=uv_set)
+            overlapping_faces.extend(ovl._getOverlapUVFaces(mesh))
+
+        # Restore original uv set
+        cmds.polyUVSet(mesh, currentUVSet=True, uvSet=original_current_uv_set)
+
+        return overlapping_faces
+
+    @classmethod
+    def get_invalid(cls, instance, compute=False):
+
+        if compute:
+            invalid = []
+            for node in cmds.ls(instance, type="mesh"):
+                faces = cls._get_overlapping_uvs(node)
+                invalid.extend(faces)
+
+            instance.data["overlapping_faces"] = invalid
+
+        return instance.data.get("overlapping_faces", [])
+
+    def process(self, instance):
+        if not self.is_active(instance.data):
+            return
+
+        invalid = self.get_invalid(instance, compute=True)
+        if invalid:
+            raise PublishValidationError(
+                "Meshes found with overlapping UVs:\n\n{0}".format(
+                    _as_report_list(sorted(invalid))
+                ),
+                title="Overlapping UVs"
+            )
diff --git a/client/ayon_maya/plugins/publish/validate_mesh_shader_connections.py b/client/ayon_maya/plugins/publish/validate_mesh_shader_connections.py
new file mode 100644
index 00000000..2e91df87
--- /dev/null
+++ b/client/ayon_maya/plugins/publish/validate_mesh_shader_connections.py
@@ -0,0 +1,130 @@
+from maya import cmds
+
+import pyblish.api
+import ayon_maya.api.action
+from ayon_core.pipeline.publish import (
+    RepairAction,
+    ValidateMeshOrder,
+    PublishValidationError,
+    OptionalPyblishPluginMixin
+)
+
+
+def pairs(iterable):
+    """Iterate over iterable per group of two"""
+    a = iter(iterable)
+    for i, y in zip(a, a):
+        yield i, y
+
+
+def get_invalid_sets(shapes):
+    """Return invalid sets for the given shapes.
+
+    This takes a list of shape nodes to cache the set members for overlapping
+    sets in the queries. This avoids many Maya set member queries.
+
+    Returns:
+        dict: Dictionary of shapes and their invalid sets, e.g.
+            {"pCubeShape": ["set1", "set2"]}
+
+    """
+
+    cache = dict()
+    invalid = dict()
+
+    # Collect the sets from the shape
+    for shape in shapes:
+        invalid_sets = []
+        sets = cmds.listSets(object=shape, t=1, extendToShape=False) or []
+        for set_ in sets:
+
+            members = cache.get(set_, None)
+            if members is None:
+                members = set(cmds.ls(cmds.sets(set_,
+                                                query=True,
+                                                nodesOnly=True), long=True))
+                cache[set_] = members
+
+            # If the shape is not actually present as a member of the set
+            # consider it invalid
+            if shape not in members:
+                invalid_sets.append(set_)
+
+        if invalid_sets:
+            invalid[shape] = invalid_sets
+
+    return invalid
+
+
+def disconnect(node_a, node_b):
+    """Remove all connections between node a and b."""
+
+    # Disconnect outputs (`listConnections` returns None when there are no
+    # connections, so fall back to an empty list)
+    outputs = cmds.listConnections(node_a,
+                                   plugs=True,
+                                   connections=True,
+                                   source=False,
+                                   destination=True) or []
+    for output, destination in pairs(outputs):
+        if destination.split(".", 1)[0] == node_b:
+            cmds.disconnectAttr(output, destination)
+
+    # Disconnect inputs
+    inputs = cmds.listConnections(node_a,
+                                  plugs=True,
+                                  connections=True,
+                                  source=True,
+                                  destination=False) or []
+    for input, source in pairs(inputs):
+        if source.split(".", 1)[0] == node_b:
+            cmds.disconnectAttr(source, input)
+
+
+class ValidateMeshShaderConnections(pyblish.api.InstancePlugin,
+                                    OptionalPyblishPluginMixin):
+    """Ensure mesh shading engine connections are valid.
+ + In some scenarios Maya keeps connections to multiple shaders even if just + a single one is assigned on the shape. + + These are related sets returned by `maya.cmds.listSets` that don't + actually have the shape as member. + + """ + + order = ValidateMeshOrder + hosts = ['maya'] + families = ['model'] + label = "Mesh Shader Connections" + actions = [ayon_maya.api.action.SelectInvalidAction, + RepairAction] + optional = True + + def process(self, instance): + """Process all the nodes in the instance 'objectSet'""" + if not self.is_active(instance.data): + return + invalid = self.get_invalid(instance) + + if invalid: + raise PublishValidationError( + "Shapes found with invalid shader connections: " + "{0}".format(invalid)) + + @staticmethod + def get_invalid(instance): + + nodes = instance[:] + shapes = cmds.ls(nodes, noIntermediate=True, long=True, type="mesh") + invalid = get_invalid_sets(shapes).keys() + + return invalid + + @classmethod + def repair(cls, instance): + + shapes = cls.get_invalid(instance) + invalid = get_invalid_sets(shapes) + for shape, invalid_sets in invalid.items(): + for set_node in invalid_sets: + disconnect(shape, set_node) diff --git a/client/ayon_maya/plugins/publish/validate_mesh_single_uv_set.py b/client/ayon_maya/plugins/publish/validate_mesh_single_uv_set.py new file mode 100644 index 00000000..d807833a --- /dev/null +++ b/client/ayon_maya/plugins/publish/validate_mesh_single_uv_set.py @@ -0,0 +1,75 @@ +from maya import cmds + +import pyblish.api +import ayon_maya.api.action +from ayon_maya.api import lib +from ayon_core.pipeline.publish import ( + RepairAction, + ValidateMeshOrder, + OptionalPyblishPluginMixin, + PublishValidationError +) + + +class ValidateMeshSingleUVSet(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): + """Warn on multiple UV sets existing for each polygon mesh. + + On versions prior to Maya 2017 this will force no multiple uv sets because + the Alembic exports in Maya prior to 2017 don't support writing multiple + UV sets. + + """ + + order = ValidateMeshOrder + hosts = ['maya'] + families = ['model', 'pointcache'] + optional = True + label = "Mesh Single UV Set" + actions = [ayon_maya.api.action.SelectInvalidAction, + RepairAction] + + @staticmethod + def get_invalid(instance): + + meshes = cmds.ls(instance, type='mesh', long=True) + + invalid = [] + for mesh in meshes: + uvSets = cmds.polyUVSet(mesh, + query=True, + allUVSets=True) or [] + + # ensure unique (sometimes maya will list 'map1' twice) + uvSets = set(uvSets) + + if len(uvSets) != 1: + invalid.append(mesh) + + return invalid + + def process(self, instance): + """Process all the nodes in the instance 'objectSet'""" + if not self.is_active(instance.data): + return + + invalid = self.get_invalid(instance) + + if invalid: + + message = "Nodes found with multiple UV sets: {0}".format(invalid) + + # Maya 2017 and up allows multiple UV sets in Alembic exports + # so we allow it, yet just warn the user to ensure they know about + # the other UV sets. 
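+            # NOTE: `cmds.about(version=True)` returns the version as a
+            # string; casting to int assumes a plain year string like "2022".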
+ allowed = int(cmds.about(version=True)) >= 2017 + + if allowed: + self.log.warning(message) + else: + raise PublishValidationError(message) + + @classmethod + def repair(cls, instance): + for mesh in cls.get_invalid(instance): + lib.remove_other_uv_sets(mesh) diff --git a/client/ayon_maya/plugins/publish/validate_mesh_uv_set_map1.py b/client/ayon_maya/plugins/publish/validate_mesh_uv_set_map1.py new file mode 100644 index 00000000..c4d3abc6 --- /dev/null +++ b/client/ayon_maya/plugins/publish/validate_mesh_uv_set_map1.py @@ -0,0 +1,138 @@ +import inspect + +from maya import cmds + +import pyblish.api +import ayon_maya.api.action +from ayon_core.pipeline.publish import ( + RepairAction, + ValidateMeshOrder, + OptionalPyblishPluginMixin, + PublishValidationError +) + + +class ValidateMeshUVSetMap1(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): + """Validate model's default set exists and is named 'map1'. + + In Maya meshes by default have a uv set named "map1" that cannot be + deleted. It can be renamed however, introducing some issues with some + renderers. As such we ensure the first (default) UV set index is named + "map1". + + """ + + order = ValidateMeshOrder + hosts = ['maya'] + families = ['model'] + optional = True + label = "Mesh has map1 UV Set" + actions = [ayon_maya.api.action.SelectInvalidAction, + RepairAction] + + @classmethod + def get_invalid(cls, instance): + + meshes = cmds.ls(instance, type='mesh', long=True) + + invalid = [] + for mesh in meshes: + + # Get existing mapping of uv sets by index + indices = cmds.polyUVSet(mesh, query=True, allUVSetsIndices=True) + maps = cmds.polyUVSet(mesh, query=True, allUVSets=True) + if not indices or not maps: + cls.log.warning("Mesh has no UV set: %s", mesh) + invalid.append(mesh) + continue + + mapping = dict(zip(indices, maps)) + + # Get the uv set at index zero. 
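+            # `allUVSetsIndices` and `allUVSets` are returned in matching
+            # order, so index 0 maps to the mesh's default (first) UV set.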
+ name = mapping[0] + if name != "map1": + invalid.append(mesh) + + return invalid + + def process(self, instance): + """Process all the nodes in the instance 'objectSet'""" + if not self.is_active(instance.data): + return + + invalid = self.get_invalid(instance) + if invalid: + + invalid_list = "\n".join(f"- {node}" for node in invalid) + + raise PublishValidationError( + "Meshes found without 'map1' UV set:\n" + "{0}".format(invalid_list), + description=self.get_description() + ) + + @classmethod + def repair(cls, instance): + """Rename uv map at index zero to map1""" + + for mesh in cls.get_invalid(instance): + + # Get existing mapping of uv sets by index + indices = cmds.polyUVSet(mesh, query=True, allUVSetsIndices=True) + maps = cmds.polyUVSet(mesh, query=True, allUVSets=True) + if not indices or not maps: + # No UV set exist at all, create a `map1` uv set + # This may fail silently if the mesh has no geometry at all + cmds.polyUVSet(mesh, create=True, uvSet="map1") + continue + + mapping = dict(zip(indices, maps)) + + # Ensure there is no uv set named map1 to avoid + # a clash on renaming the "default uv set" to map1 + existing = set(maps) + if "map1" in existing: + + # Find a unique name index + i = 2 + while True: + name = "map{0}".format(i) + if name not in existing: + break + i += 1 + + cls.log.warning("Renaming clashing uv set name on mesh" + " %s to '%s'", mesh, name) + + cmds.polyUVSet(mesh, + rename=True, + uvSet="map1", + newUVSet=name) + + # Rename the initial index to map1 + original = mapping[0] + cmds.polyUVSet(mesh, + rename=True, + uvSet=original, + newUVSet="map1") + + @staticmethod + def get_description(): + return inspect.cleandoc("""### Mesh found without map1 uv set + + A mesh must have a default UV set named `map1` to adhere to the default + mesh behavior of Maya meshes. + + There may be meshes that: + - Have no UV set + - Have no `map1` uv set but are using a different name + - Have a `map1` uv set, but it's not the default (first index) + + + #### Repair + + Using repair will try to make the first UV set the `map1` uv set. If it + does not exist yet it will be created or renames the current first + UV set to `map1`. + """) diff --git a/client/ayon_maya/plugins/publish/validate_mesh_vertices_have_edges.py b/client/ayon_maya/plugins/publish/validate_mesh_vertices_have_edges.py new file mode 100644 index 00000000..9dbdc8fd --- /dev/null +++ b/client/ayon_maya/plugins/publish/validate_mesh_vertices_have_edges.py @@ -0,0 +1,87 @@ +import pyblish.api +from maya import cmds + +import ayon_maya.api.action +from ayon_maya.api.lib import len_flattened +from ayon_core.pipeline.publish import ( + PublishValidationError, + RepairAction, + ValidateMeshOrder, + OptionalPyblishPluginMixin +) + + +class ValidateMeshVerticesHaveEdges(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): + """Validate meshes have only vertices that are connected to edges. + + Maya can have invalid geometry with vertices that have no edges or + faces connected to them. + + In Maya 2016 EXT 2 and later there's a command to fix this: + `maya.cmds.polyClean(mesh, cleanVertices=True)` + + In older versions of Maya it works to select the invalid vertices + and merge the components. + + To find these invalid vertices select all vertices of the mesh + that are visible in the viewport (drag to select), afterwards + invert your selection (Ctrl + Shift + I). The remaining selection + contains the invalid vertices. 
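+
+    Here the check compares the mesh's total vertex count with the number
+    of vertices left after converting all edges to vertices; a mismatch
+    means some vertices are not connected to any edge.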
+
+    """
+
+    order = ValidateMeshOrder
+    hosts = ['maya']
+    families = ['model']
+    label = 'Mesh Vertices Have Edges'
+    actions = [ayon_maya.api.action.SelectInvalidAction,
+               RepairAction]
+    optional = True
+
+    @classmethod
+    def repair(cls, instance):
+
+        # This fix only works in Maya 2016 EXT2 and newer
+        if float(cmds.about(version=True)) <= 2016.0:
+            raise PublishValidationError(
+                ("Repair not supported in Maya version below "
+                 "2016 EXT 2"))
+
+        invalid = cls.get_invalid(instance)
+        for node in invalid:
+            cmds.polyClean(node, cleanVertices=True)
+
+    @classmethod
+    def get_invalid(cls, instance):
+        invalid = []
+
+        meshes = cmds.ls(instance, type="mesh", long=True)
+        for mesh in meshes:
+            num_vertices = cmds.polyEvaluate(mesh, vertex=True)
+
+            if num_vertices == 0:
+                cls.log.warning(
+                    "Skipping \"{}\", because it does not have any "
+                    "vertices.".format(mesh)
+                )
+                continue
+
+            # Vertices from all edges
+            edges = "%s.e[*]" % mesh
+            vertices = cmds.polyListComponentConversion(edges, toVertex=True)
+            num_vertices_from_edges = len_flattened(vertices)
+
+            if num_vertices != num_vertices_from_edges:
+                invalid.append(mesh)
+
+        return invalid
+
+    def process(self, instance):
+        if not self.is_active(instance.data):
+            return
+        invalid = self.get_invalid(instance)
+        if invalid:
+            raise PublishValidationError(
+                ("Meshes found in instance with vertices that "
+                 "have no edges: {}").format(invalid))
diff --git a/client/ayon_maya/plugins/publish/validate_model_content.py b/client/ayon_maya/plugins/publish/validate_model_content.py
new file mode 100644
index 00000000..b06b45db
--- /dev/null
+++ b/client/ayon_maya/plugins/publish/validate_model_content.py
@@ -0,0 +1,137 @@
+import inspect
+
+from maya import cmds
+
+import pyblish.api
+import ayon_maya.api.action
+from ayon_maya.api import lib
+from ayon_core.pipeline.publish import (
+    ValidateContentsOrder,
+    PublishValidationError,
+    OptionalPyblishPluginMixin
+)
+
+
+class ValidateModelContent(pyblish.api.InstancePlugin,
+                           OptionalPyblishPluginMixin):
+    """Adheres to the content of 'model' product type
+
+    See `get_description` for more details.
+
+    """
+
+    order = ValidateContentsOrder
+    hosts = ["maya"]
+    families = ["model"]
+    label = "Model Content"
+    actions = [ayon_maya.api.action.SelectInvalidAction]
+
+    validate_top_group = True
+    optional = False
+
+    allowed = ('mesh', 'transform', 'nurbsCurve', 'nurbsSurface', 'locator')
+
+    @classmethod
+    def get_invalid(cls, instance):
+
+        content_instance = instance.data.get("setMembers", None)
+        if not content_instance:
+            cls.log.error("Model instance has no nodes. 
" + "It is not allowed to be empty") + return [instance.data["instance_node"]] + + # All children will be included in the extracted export so we also + # validate *all* descendents of the set members and we skip any + # intermediate shapes + descendants = cmds.listRelatives(content_instance, + allDescendents=True, + fullPath=True) or [] + descendants = cmds.ls(descendants, noIntermediate=True, long=True) + content_instance = list(set(content_instance + descendants)) + + # Ensure only valid node types + nodes = cmds.ls(content_instance, long=True) + valid = cmds.ls(content_instance, long=True, type=cls.allowed) + invalid = set(nodes) - set(valid) + + if invalid: + # List as bullet points + invalid_bullets = "\n".join(f"- {node}" for node in invalid) + + cls.log.error( + "These nodes are not allowed:\n{}\n\n" + "The valid node types are: {}".format( + invalid_bullets, ", ".join(cls.allowed)) + ) + return list(invalid) + + if not valid: + cls.log.error( + "No valid nodes in the model instance.\n" + "The valid node types are: {}".format(", ".join(cls.allowed)) + ) + return [instance.data["instance_node"]] + + # Ensure it has shapes + shapes = cmds.ls(valid, long=True, shapes=True) + if not shapes: + cls.log.error("No shapes in the model instance") + return [instance.data["instance_node"]] + + # Ensure single top group + top_parents = {"|" + x.split("|", 2)[1] for x in content_instance} + if cls.validate_top_group and len(top_parents) != 1: + cls.log.error( + "A model instance must have exactly one top group. " + "Found top groups: {}".format(", ".join(top_parents)) + ) + return list(top_parents) + + def _is_visible(node): + """Return whether node is visible""" + return lib.is_visible(node, + displayLayer=False, + intermediateObject=True, + parentHidden=True, + visibility=True) + + # The roots must be visible (the assemblies) + for parent in top_parents: + if not _is_visible(parent): + cls.log.error("Invisible parent (root node) is not " + "allowed: {0}".format(parent)) + invalid.add(parent) + + # Ensure at least one shape is visible + if not any(_is_visible(shape) for shape in shapes): + cls.log.error("No visible shapes in the model instance") + invalid.update(shapes) + + return list(invalid) + + def process(self, instance): + if not self.is_active(instance.data): + return + invalid = self.get_invalid(instance) + + if invalid: + raise PublishValidationError( + title="Model content is invalid", + message="Model content is invalid. See log for more details.", + description=self.get_description() + ) + + @classmethod + def get_description(cls): + return inspect.cleandoc(f""" + ### Model content is invalid + + Your model instance does not adhere to the rules of a + model product type: + + - Must have at least one visible shape in it, like a mesh. + - Must have one root node. When exporting multiple meshes they + must be inside a group. 
+ - May only contain the following node types: + {", ".join(cls.allowed)} + """) diff --git a/client/ayon_maya/plugins/publish/validate_mvlook_contents.py b/client/ayon_maya/plugins/publish/validate_mvlook_contents.py new file mode 100644 index 00000000..980565af --- /dev/null +++ b/client/ayon_maya/plugins/publish/validate_mvlook_contents.py @@ -0,0 +1,103 @@ +import os +import pyblish.api +import ayon_maya.api.action +from ayon_core.pipeline.publish import ( + ValidateContentsOrder, + OptionalPyblishPluginMixin, + PublishValidationError +) + + +COLOUR_SPACES = ['sRGB', 'linear', 'auto'] +MIPMAP_EXTENSIONS = ['tdl'] + + +class ValidateMvLookContents(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): + order = ValidateContentsOrder + families = ['mvLook'] + hosts = ['maya'] + label = 'Validate mvLook Data' + actions = [ayon_maya.api.action.SelectInvalidAction] + + # Allow this validation step to be skipped when you just need to + # get things pushed through. + optional = True + + # These intents get enforced checks, other ones get warnings. + enforced_intents = ['-', 'Final'] + + def process(self, instance): + if not self.is_active(instance.data): + return + + intent = instance.context.data['intent']['value'] + publishMipMap = instance.data["publishMipMap"] + enforced = True + if intent in self.enforced_intents: + self.log.debug("This validation will be enforced: '{}'" + .format(intent)) + else: + enforced = False + self.log.debug("This validation will NOT be enforced: '{}'" + .format(intent)) + + if not instance[:]: + raise PublishValidationError("Instance is empty") + + invalid = set() + + resources = instance.data.get("resources", []) + for resource in resources: + files = resource["files"] + self.log.debug( + "Resource '{}', files: [{}]".format(resource, files)) + node = resource["node"] + if len(files) == 0: + self.log.error("File node '{}' uses no or non-existing " + "files".format(node)) + invalid.add(node) + continue + for fname in files: + if not self.valid_file(fname): + self.log.error("File node '{}'/'{}' is not valid" + .format(node, fname)) + invalid.add(node) + + if publishMipMap and not self.is_or_has_mipmap(fname, files): + msg = "File node '{}'/'{}' does not have a mipmap".format( + node, fname) + if enforced: + invalid.add(node) + self.log.error(msg) + raise PublishValidationError(msg) + else: + self.log.warning(msg) + + if invalid: + raise PublishValidationError( + "'{}' has invalid look content".format(instance.name) + ) + + def valid_file(self, fname): + self.log.debug("Checking validity of '{}'".format(fname)) + if not os.path.exists(fname): + return False + if os.path.getsize(fname) == 0: + return False + return True + + def is_or_has_mipmap(self, fname, files): + ext = os.path.splitext(fname)[1][1:] + if ext in MIPMAP_EXTENSIONS: + self.log.debug(" - Is a mipmap '{}'".format(fname)) + return True + + for colour_space in COLOUR_SPACES: + for mipmap_ext in MIPMAP_EXTENSIONS: + mipmap_fname = '.'.join([fname, colour_space, mipmap_ext]) + if mipmap_fname in files: + self.log.debug( + " - Has a mipmap '{}'".format(mipmap_fname)) + return True + return False diff --git a/client/ayon_maya/plugins/publish/validate_no_animation.py b/client/ayon_maya/plugins/publish/validate_no_animation.py new file mode 100644 index 00000000..0e1f1794 --- /dev/null +++ b/client/ayon_maya/plugins/publish/validate_no_animation.py @@ -0,0 +1,60 @@ +from maya import cmds + +import pyblish.api +import ayon_maya.api.action +from ayon_core.pipeline.publish import ( + ValidateContentsOrder, 
+ OptionalPyblishPluginMixin, + PublishValidationError +) + + +def _as_report_list(values, prefix="- ", suffix="\n"): + """Return list as bullet point list for a report""" + if not values: + return "" + return prefix + (suffix + prefix).join(values) + + +class ValidateNoAnimation(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): + """Ensure no keyframes on nodes in the Instance. + + Even though a Model would extract without animCurves correctly this avoids + getting different output from a model when extracted from a different + frame than the first frame. (Might be overly restrictive though) + + """ + + order = ValidateContentsOrder + label = "No Animation" + hosts = ["maya"] + families = ["model"] + optional = True + actions = [ayon_maya.api.action.SelectInvalidAction] + + def process(self, instance): + if not self.is_active(instance.data): + return + + invalid = self.get_invalid(instance) + if invalid: + raise PublishValidationError( + "Keyframes found on:\n\n{0}".format( + _as_report_list(sorted(invalid)) + ), + title="Keyframes on model" + ) + + @staticmethod + def get_invalid(instance): + + nodes = instance[:] + if not nodes: + return [] + + curves = cmds.keyframe(nodes, query=True, name=True) + if curves: + return list(set(cmds.listConnections(curves))) + + return [] diff --git a/client/ayon_maya/plugins/publish/validate_no_default_camera.py b/client/ayon_maya/plugins/publish/validate_no_default_camera.py new file mode 100644 index 00000000..be469cd4 --- /dev/null +++ b/client/ayon_maya/plugins/publish/validate_no_default_camera.py @@ -0,0 +1,52 @@ +from maya import cmds + +import pyblish.api +import ayon_maya.api.action +from ayon_core.pipeline.publish import ( + ValidateContentsOrder, + PublishValidationError, + OptionalPyblishPluginMixin +) + + +def _as_report_list(values, prefix="- ", suffix="\n"): + """Return list as bullet point list for a report""" + if not values: + return "" + return prefix + (suffix + prefix).join(values) + + +class ValidateNoDefaultCameras(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): + """Ensure no default (startup) cameras are in the instance. + + This might be unnecessary. In the past there were some issues with + referencing/importing files that contained the start up cameras overriding + settings when being loaded and sometimes being skipped. 
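+
+    Startup cameras are detected with
+    `cmds.camera(cam, query=True, startupCamera=True)`.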
+ """ + + order = ValidateContentsOrder + hosts = ['maya'] + families = ['camera'] + label = "No Default Cameras" + actions = [ayon_maya.api.action.SelectInvalidAction] + optional = False + + @staticmethod + def get_invalid(instance): + cameras = cmds.ls(instance, type='camera', long=True) + return [cam for cam in cameras if + cmds.camera(cam, query=True, startupCamera=True)] + + def process(self, instance): + """Process all the cameras in the instance""" + if not self.is_active(instance.data): + return + invalid = self.get_invalid(instance) + if invalid: + raise PublishValidationError( + "Default cameras found:\n\n{0}".format( + _as_report_list(sorted(invalid)) + ), + title="Default cameras" + ) diff --git a/client/ayon_maya/plugins/publish/validate_no_namespace.py b/client/ayon_maya/plugins/publish/validate_no_namespace.py new file mode 100644 index 00000000..0d7b67e1 --- /dev/null +++ b/client/ayon_maya/plugins/publish/validate_no_namespace.py @@ -0,0 +1,78 @@ +import maya.cmds as cmds + +import pyblish.api +from ayon_core.pipeline.publish import ( + RepairAction, + ValidateContentsOrder, + PublishValidationError, + OptionalPyblishPluginMixin +) + +import ayon_maya.api.action + + +def _as_report_list(values, prefix="- ", suffix="\n"): + """Return list as bullet point list for a report""" + if not values: + return "" + return prefix + (suffix + prefix).join(values) + + +def get_namespace(node_name): + # ensure only node's name (not parent path) + node_name = node_name.rsplit("|", 1)[-1] + # ensure only namespace + return node_name.rpartition(":")[0] + + +class ValidateNoNamespace(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): + """Ensure the nodes don't have a namespace""" + + order = ValidateContentsOrder + hosts = ['maya'] + families = ['model'] + label = 'No Namespaces' + actions = [ayon_maya.api.action.SelectInvalidAction, + RepairAction] + optional = False + + @staticmethod + def get_invalid(instance): + nodes = cmds.ls(instance, long=True) + return [node for node in nodes if get_namespace(node)] + + def process(self, instance): + """Process all the nodes in the instance""" + if not self.is_active(instance.data): + return + invalid = self.get_invalid(instance) + + if invalid: + invalid_namespaces = {get_namespace(node) for node in invalid} + raise PublishValidationError( + message="Namespaces found:\n\n{0}".format( + _as_report_list(sorted(invalid_namespaces)) + ), + title="Namespaces in model", + description=( + "## Namespaces found in model\n" + "It is not allowed to publish a model that contains " + "namespaces." + ) + ) + + @classmethod + def repair(cls, instance): + """Remove all namespaces from the nodes in the instance""" + + invalid = cls.get_invalid(instance) + + # Iterate over the nodes by long to short names to iterate the lowest + # in hierarchy nodes first. 
This way we avoid having renamed parents + # before renaming children nodes + for node in sorted(invalid, key=len, reverse=True): + + node_name = node.rsplit("|", 1)[-1] + node_name_without_namespace = node_name.rsplit(":")[-1] + cmds.rename(node, node_name_without_namespace) diff --git a/client/ayon_maya/plugins/publish/validate_no_null_transforms.py b/client/ayon_maya/plugins/publish/validate_no_null_transforms.py new file mode 100644 index 00000000..876be073 --- /dev/null +++ b/client/ayon_maya/plugins/publish/validate_no_null_transforms.py @@ -0,0 +1,92 @@ +import maya.cmds as cmds + +import pyblish.api +import ayon_maya.api.action +from ayon_core.pipeline.publish import ( + RepairAction, + ValidateContentsOrder, + PublishValidationError, + OptionalPyblishPluginMixin +) + + +def _as_report_list(values, prefix="- ", suffix="\n"): + """Return list as bullet point list for a report""" + if not values: + return "" + return prefix + (suffix + prefix).join(values) + + +def has_shape_children(node): + # Check if any descendants + all_descendents = cmds.listRelatives(node, + allDescendents=True, + fullPath=True) + if not all_descendents: + return False + + # Check if there are any shapes at all + shapes = cmds.ls(all_descendents, shapes=True, noIntermediate=True) + if not shapes: + return False + + return True + + +class ValidateNoNullTransforms(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): + """Ensure no null transforms are in the scene. + + Warning: + Transforms with only intermediate shapes are also considered null + transforms. These transform nodes could potentially be used in your + construction history, so take care when automatically fixing this or + when deleting the empty transforms manually. + + """ + + order = ValidateContentsOrder + hosts = ['maya'] + families = ['model'] + label = 'No Empty/Null Transforms' + actions = [RepairAction, + ayon_maya.api.action.SelectInvalidAction] + optional = False + + @staticmethod + def get_invalid(instance): + """Return invalid transforms in instance""" + + transforms = cmds.ls(instance, type='transform', long=True) + + invalid = [] + for transform in transforms: + if not has_shape_children(transform): + invalid.append(transform) + + return invalid + + def process(self, instance): + """Process all the transform nodes in the instance """ + if not self.is_active(instance.data): + return + invalid = self.get_invalid(instance) + if invalid: + raise PublishValidationError( + "Empty transforms found without shapes:\n\n{0}".format( + _as_report_list(sorted(invalid)) + ), + title="Empty transforms" + ) + + @classmethod + def repair(cls, instance): + """Delete all null transforms. + + Note: If the node is used elsewhere (eg. connection to attributes or + in history) deletion might mess up things. 
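+
+        Only transforms without any non-intermediate shape descendants
+        (see `has_shape_children`) are collected as invalid, so only those
+        nodes get deleted.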
+ + """ + invalid = cls.get_invalid(instance) + if invalid: + cmds.delete(invalid) diff --git a/client/ayon_maya/plugins/publish/validate_no_unknown_nodes.py b/client/ayon_maya/plugins/publish/validate_no_unknown_nodes.py new file mode 100644 index 00000000..8b4d8730 --- /dev/null +++ b/client/ayon_maya/plugins/publish/validate_no_unknown_nodes.py @@ -0,0 +1,54 @@ +from maya import cmds + +import pyblish.api +import ayon_maya.api.action +from ayon_core.pipeline.publish import ( + ValidateContentsOrder, + OptionalPyblishPluginMixin, + PublishValidationError +) + + +def _as_report_list(values, prefix="- ", suffix="\n"): + """Return list as bullet point list for a report""" + if not values: + return "" + return prefix + (suffix + prefix).join(values) + + +class ValidateNoUnknownNodes(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): + """Checks to see if there are any unknown nodes in the instance. + + This often happens if nodes from plug-ins are used but are not available + on this machine. + + Note: Some studios use unknown nodes to store data on (as attributes) + because it's a lightweight node. + + """ + + order = ValidateContentsOrder + hosts = ['maya'] + families = ['model', 'rig'] + optional = True + label = "Unknown Nodes" + actions = [ayon_maya.api.action.SelectInvalidAction] + + @staticmethod + def get_invalid(instance): + return cmds.ls(instance, type='unknown') + + def process(self, instance): + """Process all the nodes in the instance""" + if not self.is_active(instance.data): + return + + invalid = self.get_invalid(instance) + if invalid: + raise PublishValidationError( + "Unknown nodes found:\n\n{0}".format( + _as_report_list(sorted(invalid)) + ), + title="Unknown nodes" + ) diff --git a/client/ayon_maya/plugins/publish/validate_no_vraymesh.py b/client/ayon_maya/plugins/publish/validate_no_vraymesh.py new file mode 100644 index 00000000..2d59608e --- /dev/null +++ b/client/ayon_maya/plugins/publish/validate_no_vraymesh.py @@ -0,0 +1,46 @@ +import pyblish.api +from maya import cmds +from ayon_core.pipeline.publish import ( + PublishValidationError, + OptionalPyblishPluginMixin +) + +def _as_report_list(values, prefix="- ", suffix="\n"): + """Return list as bullet point list for a report""" + if not values: + return "" + return prefix + (suffix + prefix).join(values) + + +class ValidateNoVRayMesh(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): + """Validate there are no VRayMesh objects in the instance""" + + order = pyblish.api.ValidatorOrder + label = 'No V-Ray Proxies (VRayMesh)' + families = ["pointcache"] + optional = False + + def process(self, instance): + if not self.is_active(instance.data): + return + if not cmds.pluginInfo("vrayformaya", query=True, loaded=True): + return + + shapes = cmds.ls(instance, + shapes=True, + type="mesh") + + inputs = cmds.listConnections(shapes, + destination=False, + source=True) or [] + vray_meshes = cmds.ls(inputs, type='VRayMesh') + if vray_meshes: + raise PublishValidationError( + "Meshes that are V-Ray Proxies should not be in an Alembic " + "pointcache.\n" + "Found V-Ray proxies:\n\n{}".format( + _as_report_list(sorted(vray_meshes)) + ), + title="V-Ray Proxies in pointcache" + ) diff --git a/client/ayon_maya/plugins/publish/validate_node_ids.py b/client/ayon_maya/plugins/publish/validate_node_ids.py new file mode 100644 index 00000000..810dbc4f --- /dev/null +++ b/client/ayon_maya/plugins/publish/validate_node_ids.py @@ -0,0 +1,67 @@ +import pyblish.api + +from ayon_core.pipeline.publish import ( + 
ValidatePipelineOrder, + PublishXmlValidationError +) +import ayon_maya.api.action +from ayon_maya.api import lib + + +class ValidateNodeIDs(pyblish.api.InstancePlugin): + """Validate nodes have a Colorbleed Id. + + When IDs are missing from nodes *save your scene* and they should be + automatically generated because IDs are created on non-referenced nodes + in Maya upon scene save. + + """ + + order = ValidatePipelineOrder + label = 'Instance Nodes Have ID' + hosts = ['maya'] + families = ["model", + "look", + "rig", + "pointcache", + "animation", + "yetiRig", + "assembly"] + + actions = [ayon_maya.api.action.SelectInvalidAction, + ayon_maya.api.action.GenerateUUIDsOnInvalidAction] + + @classmethod + def apply_settings(cls, project_settings): + # Disable plug-in if cbId workflow is disabled + if not project_settings["maya"].get("use_cbid_workflow", True): + cls.enabled = False + return + + def process(self, instance): + """Process all meshes""" + + # Ensure all nodes have a cbId + invalid = self.get_invalid(instance) + if invalid: + names = "\n".join( + "- {}".format(node) for node in invalid + ) + raise PublishXmlValidationError( + plugin=self, + message="Nodes found without IDs: {}".format(invalid), + formatting_data={"nodes": names} + ) + + @classmethod + def get_invalid(cls, instance): + """Return the member nodes that are invalid""" + + # We do want to check the referenced nodes as it might be + # part of the end product. + id_nodes = lib.get_id_required_nodes(referenced_nodes=True, + nodes=instance[:], + # Exclude those with already + # existing ids + existing_ids=False) + return id_nodes diff --git a/client/ayon_maya/plugins/publish/validate_node_ids_deformed_shapes.py b/client/ayon_maya/plugins/publish/validate_node_ids_deformed_shapes.py new file mode 100644 index 00000000..4fc8f776 --- /dev/null +++ b/client/ayon_maya/plugins/publish/validate_node_ids_deformed_shapes.py @@ -0,0 +1,78 @@ +import pyblish.api +from maya import cmds + +import ayon_maya.api.action +from ayon_maya.api import lib +from ayon_core.pipeline.publish import ( + PublishValidationError, RepairAction, ValidateContentsOrder) + + +class ValidateNodeIdsDeformedShape(pyblish.api.InstancePlugin): + """Validate if deformed shapes have related IDs to the original shapes. + + When a deformer is applied in the scene on a referenced mesh that already + had deformers then Maya will create a new shape node for the mesh that + does not have the original id. This validator checks whether the ids are + valid on all the shape nodes in the instance. 
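+
+    The expected id is taken from a sibling shape in the node's history via
+    `lib.get_id_from_sibling`; repairing copies that history id back onto
+    the deformed shape.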
+
+    """
+
+    order = ValidateContentsOrder
+    families = ['look']
+    hosts = ['maya']
+    label = 'Deformed shape ids'
+    actions = [
+        ayon_maya.api.action.SelectInvalidAction,
+        RepairAction
+    ]
+
+    @classmethod
+    def apply_settings(cls, project_settings):
+        # Disable plug-in if cbId workflow is disabled
+        if not project_settings["maya"].get("use_cbid_workflow", True):
+            cls.enabled = False
+            return
+
+    def process(self, instance):
+        """Process all the nodes in the instance"""
+
+        # Ensure all nodes have a cbId and a related ID to the original
+        # shapes if a deformer has been created on the shape
+        invalid = self.get_invalid(instance)
+        if invalid:
+            raise PublishValidationError(
+                ("Shapes found that are considered 'Deformed' "
+                 "without object ids: {0}").format(invalid))
+
+    @classmethod
+    def get_invalid(cls, instance):
+        """Get all nodes which do not match the criteria"""
+
+        shapes = cmds.ls(instance[:],
+                         dag=True,
+                         leaf=True,
+                         shapes=True,
+                         long=True,
+                         noIntermediate=True)
+
+        invalid = []
+        for shape in shapes:
+            history_id = lib.get_id_from_sibling(shape)
+            if history_id:
+                current_id = lib.get_id(shape)
+                if current_id != history_id:
+                    invalid.append(shape)
+
+        return invalid
+
+    @classmethod
+    def repair(cls, instance):
+
+        for node in cls.get_invalid(instance):
+            # Get the original id from history
+            history_id = lib.get_id_from_sibling(node)
+            if not history_id:
+                cls.log.error("Could not find ID in history for '%s'", node)
+                continue
+
+            lib.set_id(node, history_id, overwrite=True)
diff --git a/client/ayon_maya/plugins/publish/validate_node_ids_in_database.py b/client/ayon_maya/plugins/publish/validate_node_ids_in_database.py
new file mode 100644
index 00000000..a1fde89f
--- /dev/null
+++ b/client/ayon_maya/plugins/publish/validate_node_ids_in_database.py
@@ -0,0 +1,101 @@
+import pyblish.api
+import ayon_api
+
+import ayon_maya.api.action
+from ayon_maya.api import lib
+from ayon_core.pipeline.publish import (
+    PublishValidationError, ValidatePipelineOrder)
+
+
+class ValidateNodeIdsInDatabase(pyblish.api.InstancePlugin):
+    """Validate if the CB Id is related to a folder in the database
+
+    All nodes with the `cbId` attribute will be validated to ensure that
+    the loaded asset in the scene is related to the current project.
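+
+    The folder id prefix of each node's `cbId` (the part before the ":")
+    is compared against all folder ids that exist in the current project.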
+ + Tip: If there is an asset which is being reused from a different project + please ensure the asset is republished in the new project + + """ + + order = ValidatePipelineOrder + label = 'Node Ids in Database' + hosts = ['maya'] + families = ["*"] + + actions = [ayon_maya.api.action.SelectInvalidAction, + ayon_maya.api.action.GenerateUUIDsOnInvalidAction] + + @classmethod + def apply_settings(cls, project_settings): + # Disable plug-in if cbId workflow is disabled + if not project_settings["maya"].get("use_cbid_workflow", True): + cls.enabled = False + return + + def process(self, instance): + invalid = self.get_invalid(instance) + if invalid: + raise PublishValidationError( + "Found folder ids which are not related to " + "current project in instance: `{}`".format(instance.name)) + + @classmethod + def get_invalid(cls, instance): + + nodes = instance[:] + if not nodes: + return + + # Get all id required nodes + id_required_nodes = lib.get_id_required_nodes(referenced_nodes=False, + nodes=nodes) + if not id_required_nodes: + return + + # check ids against database ids + folder_ids = cls.get_project_folder_ids(context=instance.context) + + # Get all asset IDs + invalid = [] + for node in id_required_nodes: + cb_id = lib.get_id(node) + + # Ignore nodes without id, those are validated elsewhere + if not cb_id: + continue + + folder_id = cb_id.split(":", 1)[0] + if folder_id not in folder_ids: + cls.log.error("`%s` has unassociated folder id" % node) + invalid.append(node) + + return invalid + + @classmethod + def get_project_folder_ids(cls, context): + """Return all folder ids in the current project. + + Arguments: + context (pyblish.api.Context): The publish context. + + Returns: + set[str]: All folder ids in the current project. + + """ + # We query the database only for the first instance instead of + # per instance by storing a cache in the context + key = "__cache_project_folder_ids" + if key in context.data: + return context.data[key] + + # check ids against database + project_name = context.data["projectName"] + folder_entities = ayon_api.get_folders(project_name, fields={"id"}) + folder_ids = { + folder_entity["id"] + for folder_entity in folder_entities + } + + context.data[key] = folder_ids + return folder_ids diff --git a/client/ayon_maya/plugins/publish/validate_node_ids_related.py b/client/ayon_maya/plugins/publish/validate_node_ids_related.py new file mode 100644 index 00000000..0ad497d1 --- /dev/null +++ b/client/ayon_maya/plugins/publish/validate_node_ids_related.py @@ -0,0 +1,122 @@ +import inspect +import uuid +from collections import defaultdict +import pyblish.api + +import ayon_maya.api.action +from ayon_maya.api import lib +from ayon_core.pipeline.publish import ( + OptionalPyblishPluginMixin, PublishValidationError, ValidatePipelineOrder) +from ayon_api import get_folders + + +def is_valid_uuid(value) -> bool: + """Return whether value is a valid UUID""" + try: + uuid.UUID(value) + except ValueError: + return False + return True + + +class ValidateNodeIDsRelated(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): + """Validate nodes have a related `cbId` to the instance.data[folderPath]""" + + order = ValidatePipelineOrder + label = 'Node Ids Related (ID)' + hosts = ['maya'] + families = ["model", + "look", + "rig"] + optional = True + + actions = [ayon_maya.api.action.SelectInvalidAction, + ayon_maya.api.action.GenerateUUIDsOnInvalidAction] + + @classmethod + def apply_settings(cls, project_settings): + # Disable plug-in if cbId workflow is disabled + if not 
project_settings["maya"].get("use_cbid_workflow", True):
+            cls.enabled = False
+            return
+
+    def process(self, instance):
+        """Process all nodes in instance (including hierarchy)"""
+        if not self.is_active(instance.data):
+            return
+
+        # Ensure all nodes have a cbId
+        invalid = self.get_invalid(instance)
+        if invalid:
+
+            invalid_list = "\n".join(f"- {node}" for node in sorted(invalid))
+
+            raise PublishValidationError((
+                "Node IDs found that are not related to folder '{}':\n{}"
+            ).format(instance.data["folderPath"], invalid_list),
+                description=self.get_description()
+            )
+
+    @classmethod
+    def get_invalid(cls, instance):
+        """Return the member nodes that are invalid"""
+        folder_id = instance.data["folderEntity"]["id"]
+
+        # We do want to check the referenced nodes as it might be
+        # part of the end product
+        invalid = list()
+        nodes_by_other_folder_ids = defaultdict(set)
+        for node in instance:
+            _id = lib.get_id(node)
+            if not _id:
+                continue
+
+            node_folder_id = _id.split(":", 1)[0]
+            if node_folder_id != folder_id:
+                invalid.append(node)
+                nodes_by_other_folder_ids[node_folder_id].add(node)
+
+        # Log what other assets were found.
+        if nodes_by_other_folder_ids:
+            project_name = instance.context.data["projectName"]
+            other_folder_ids = set(nodes_by_other_folder_ids.keys())
+
+            # Remove folder ids that are not valid UUID identifiers, these
+            # may be legacy OpenPype ids
+            other_folder_ids = {folder_id for folder_id in other_folder_ids
+                                if is_valid_uuid(folder_id)}
+            if not other_folder_ids:
+                return invalid
+
+            folder_entities = get_folders(project_name=project_name,
+                                          folder_ids=other_folder_ids,
+                                          fields=["path"])
+            if folder_entities:
+                # Log names of other assets detected
+                # We disregard logging nodes/ids for asset ids where no asset
+                # was found in the database because ValidateNodeIdsInDatabase
+                # takes care of that.
+                folder_paths = {entity["path"] for entity in folder_entities}
+                cls.log.error(
+                    "Found nodes related to other folders:\n{}".format(
+                        "\n".join(f"- {path}" for path in sorted(folder_paths))
+                    )
+                )
+
+        return invalid
+
+    @staticmethod
+    def get_description():
+        return inspect.cleandoc("""### Node IDs must match folder id
+
+        The node ids must match the folder entity id you are publishing to.
+
+        Usually these mismatches occur when you are re-using nodes from
+        another folder or project.
+
+        #### How to repair?
+
+        The repair action will regenerate new ids for
+        the invalid nodes to match the instance's folder.
+ """) diff --git a/client/ayon_maya/plugins/publish/validate_node_ids_unique.py b/client/ayon_maya/plugins/publish/validate_node_ids_unique.py new file mode 100644 index 00000000..ae04a486 --- /dev/null +++ b/client/ayon_maya/plugins/publish/validate_node_ids_unique.py @@ -0,0 +1,81 @@ +from collections import defaultdict + +import pyblish.api +from ayon_core.pipeline.publish import ( + ValidatePipelineOrder, + PublishValidationError +) +import ayon_maya.api.action +from ayon_maya.api import lib + +from maya import cmds + + +class ValidateNodeIdsUnique(pyblish.api.InstancePlugin): + """Validate the nodes in the instance have a unique Colorbleed Id + + Here we ensure that what has been added to the instance is unique + """ + + order = ValidatePipelineOrder + label = 'Non Duplicate Instance Members (ID)' + hosts = ['maya'] + families = ["model", + "look", + "rig", + "yetiRig"] + + actions = [ayon_maya.api.action.SelectInvalidAction, + ayon_maya.api.action.GenerateUUIDsOnInvalidAction] + + @classmethod + def apply_settings(cls, project_settings): + # Disable plug-in if cbId workflow is disabled + if not project_settings["maya"].get("use_cbid_workflow", True): + cls.enabled = False + return + + def process(self, instance): + """Process all meshes""" + + # Ensure all nodes have a cbId + invalid = self.get_invalid(instance) + if invalid: + label = "Nodes found with non-unique folder ids" + raise PublishValidationError( + message="{}, see log".format(label), + title="Non-unique folder ids on nodes", + description="{}\n- {}".format(label, + "\n- ".join(sorted(invalid))) + ) + + @classmethod + def get_invalid(cls, instance): + """Return the member nodes that are invalid""" + + # Check only non intermediate shapes + # todo: must the instance itself ensure to have no intermediates? + # todo: how come there are intermediates? + instance_members = cmds.ls(instance, noIntermediate=True, long=True) + + # Collect each id with their members + ids = defaultdict(list) + for member in instance_members: + object_id = lib.get_id(member) + if not object_id: + continue + ids[object_id].append(member) + + # Take only the ids with more than one member + invalid = list() + for members in ids.values(): + if len(members) > 1: + members_text = "\n".join( + "- {}".format(member) for member in sorted(members) + ) + cls.log.error( + "ID found on multiple nodes:\n{}".format(members_text) + ) + invalid.extend(members) + + return invalid diff --git a/client/ayon_maya/plugins/publish/validate_node_no_ghosting.py b/client/ayon_maya/plugins/publish/validate_node_no_ghosting.py new file mode 100644 index 00000000..1220282d --- /dev/null +++ b/client/ayon_maya/plugins/publish/validate_node_no_ghosting.py @@ -0,0 +1,61 @@ +from maya import cmds + +import pyblish.api + +import ayon_maya.api.action +from ayon_core.pipeline.publish import ( + ValidateContentsOrder, + OptionalPyblishPluginMixin, + PublishValidationError +) + + +class ValidateNodeNoGhosting(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): + """Ensure nodes do not have ghosting enabled. + + If one would publish towards a non-Maya format it's likely that stats + like ghosting won't be exported, eg. exporting to Alembic. + + Instead of creating many micro-managing checks (like this one) to ensure + attributes have not been changed from their default it could be more + efficient to export to a format that will never hold such data anyway. 
+ + """ + + order = ValidateContentsOrder + hosts = ['maya'] + families = ['model', 'rig'] + label = "No Ghosting" + actions = [ayon_maya.api.action.SelectInvalidAction] + optional = False + + _attributes = {'ghosting': 0} + + @classmethod + def get_invalid(cls, instance): + + # Transforms and shapes seem to have ghosting + nodes = cmds.ls(instance, long=True, type=['transform', 'shape']) + invalid = [] + for node in nodes: + _iteritems = getattr( + cls._attributes, "iteritems", cls._attributes.items + ) + for attr, required_value in _iteritems(): + if cmds.attributeQuery(attr, node=node, exists=True): + + value = cmds.getAttr('{0}.{1}'.format(node, attr)) + if value != required_value: + invalid.append(node) + + return invalid + + def process(self, instance): + if not self.is_active(instance.data): + return + invalid = self.get_invalid(instance) + + if invalid: + raise PublishValidationError( + "Nodes with ghosting enabled found: {0}".format(invalid)) diff --git a/client/ayon_maya/plugins/publish/validate_plugin_path_attributes.py b/client/ayon_maya/plugins/publish/validate_plugin_path_attributes.py new file mode 100644 index 00000000..ef6f9737 --- /dev/null +++ b/client/ayon_maya/plugins/publish/validate_plugin_path_attributes.py @@ -0,0 +1,82 @@ +import os + +from maya import cmds + +import pyblish.api + +from ayon_maya.api.lib import pairwise +from ayon_maya.api.action import SelectInvalidAction +from ayon_core.pipeline.publish import ( + ValidateContentsOrder, + PublishValidationError, + OptionalPyblishPluginMixin +) + + +class ValidatePluginPathAttributes(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): + """ + Validate plug-in path attributes point to existing file paths. + """ + + order = ValidateContentsOrder + hosts = ['maya'] + families = ["workfile"] + label = "Plug-in Path Attributes" + actions = [SelectInvalidAction] + optional = False + + # Attributes are defined in project settings + attribute = [] + + @classmethod + def get_invalid(cls, instance): + invalid = list() + + file_attrs = { + item["name"]: item["value"] + for item in cls.attribute + } + if not file_attrs: + return invalid + + # Consider only valid node types to avoid "Unknown object type" warning + all_node_types = set(cmds.allNodeTypes()) + node_types = [ + key + for key in file_attrs.keys() + if key in all_node_types + ] + + for node, node_type in pairwise(cmds.ls(type=node_types, + showType=True)): + # get the filepath + file_attr = "{}.{}".format(node, file_attrs[node_type]) + filepath = cmds.getAttr(file_attr) + + if filepath and not os.path.exists(filepath): + cls.log.error("{} '{}' uses non-existing filepath: {}" + .format(node_type, node, filepath)) + invalid.append(node) + + return invalid + + def process(self, instance): + """Process all directories Set as Filenames in Non-Maya Nodes""" + if not self.is_active(instance.data): + return + invalid = self.get_invalid(instance) + if invalid: + raise PublishValidationError( + title="Plug-in Path Attributes", + message="Non-existent filepath found on nodes: {}".format( + ", ".join(invalid) + ), + description=( + "## Plug-in nodes use invalid filepaths\n" + "The workfile contains nodes from plug-ins that use " + "filepaths which do not exist.\n\n" + "Please make sure their filepaths are correct and the " + "files exist on disk." 
+                )
+            )
diff --git a/client/ayon_maya/plugins/publish/validate_render_image_rule.py b/client/ayon_maya/plugins/publish/validate_render_image_rule.py
new file mode 100644
index 00000000..117f7df8
--- /dev/null
+++ b/client/ayon_maya/plugins/publish/validate_render_image_rule.py
@@ -0,0 +1,76 @@
+import os
+
+import pyblish.api
+
+from maya import cmds
+
+from ayon_core.pipeline.publish import (
+    PublishValidationError,
+    RepairAction,
+    ValidateContentsOrder,
+    OptionalPyblishPluginMixin
+)
+
+
+class ValidateRenderImageRule(pyblish.api.InstancePlugin,
+                              OptionalPyblishPluginMixin):
+    """Validates Maya Workspace "images" file rule matches project settings.
+
+    This validates against the configured default render image folder:
+        Studio Settings > Project > Maya >
+        Render Settings > Default render image folder.
+
+    """
+
+    order = ValidateContentsOrder
+    label = "Images File Rule (Workspace)"
+    hosts = ["maya"]
+    families = ["renderlayer"]
+    actions = [RepairAction]
+    optional = False
+
+    def process(self, instance):
+        if not self.is_active(instance.data):
+            return
+        required_images_rule = os.path.normpath(
+            self.get_default_render_image_folder(instance)
+        )
+        current_images_rule = os.path.normpath(
+            cmds.workspace(fileRuleEntry="images")
+        )
+
+        if current_images_rule != required_images_rule:
+            raise PublishValidationError(
+                (
+                    "Invalid workspace `images` file rule value: '{}'. "
+                    "Must be set to: '{}'"
+                ).format(current_images_rule, required_images_rule))
+
+    @classmethod
+    def repair(cls, instance):
+
+        required_images_rule = cls.get_default_render_image_folder(instance)
+        current_images_rule = cmds.workspace(fileRuleEntry="images")
+
+        if current_images_rule != required_images_rule:
+            cmds.workspace(fileRule=("images", required_images_rule))
+            cmds.workspace(saveWorkspace=True)
+
+    @classmethod
+    def get_default_render_image_folder(cls, instance):
+        staging_dir = instance.data.get("stagingDir")
+        if staging_dir:
+            cls.log.debug(
+                "Staging dir found: \"{}\". 
Ignoring setting from "
+                "`project_settings/maya/render_settings/"
+                "default_render_image_folder`.".format(staging_dir)
+            )
+            return staging_dir
+
+        return (
+            instance.context.data
+            ["project_settings"]
+            ["maya"]
+            ["render_settings"]
+            ["default_render_image_folder"]
+        )
diff --git a/client/ayon_maya/plugins/publish/validate_render_no_default_cameras.py b/client/ayon_maya/plugins/publish/validate_render_no_default_cameras.py
new file mode 100644
index 00000000..1692b909
--- /dev/null
+++ b/client/ayon_maya/plugins/publish/validate_render_no_default_cameras.py
@@ -0,0 +1,45 @@
+from maya import cmds
+
+import pyblish.api
+
+import ayon_maya.api.action
+from ayon_core.pipeline.publish import (
+    ValidateContentsOrder,
+    PublishValidationError,
+    OptionalPyblishPluginMixin
+)
+
+
+class ValidateRenderNoDefaultCameras(pyblish.api.InstancePlugin,
+                                     OptionalPyblishPluginMixin):
+    """Ensure no default (startup) cameras are to be rendered."""
+
+    order = ValidateContentsOrder
+    hosts = ['maya']
+    families = ['renderlayer']
+    label = "No Default Cameras Renderable"
+    actions = [ayon_maya.api.action.SelectInvalidAction]
+    optional = False
+
+    @staticmethod
+    def get_invalid(instance):
+
+        renderable = set(instance.data["cameras"])
+
+        # Collect default cameras
+        cameras = cmds.ls(type='camera', long=True)
+        defaults = set(cam for cam in cameras if
+                       cmds.camera(cam, query=True, startupCamera=True))
+
+        return [cam for cam in renderable if cam in defaults]
+
+    def process(self, instance):
+        """Process all the cameras in the instance"""
+        if not self.is_active(instance.data):
+            return
+        invalid = self.get_invalid(instance)
+        if invalid:
+            raise PublishValidationError(
+                title="Rendering default cameras",
+                message="Renderable default cameras "
+                        "found: {0}".format(invalid))
diff --git a/client/ayon_maya/plugins/publish/validate_render_single_camera.py b/client/ayon_maya/plugins/publish/validate_render_single_camera.py
new file mode 100644
index 00000000..cb03e686
--- /dev/null
+++ b/client/ayon_maya/plugins/publish/validate_render_single_camera.py
@@ -0,0 +1,84 @@
+import re
+import inspect
+
+import pyblish.api
+from maya import cmds
+
+import ayon_maya.api.action
+from ayon_maya.api.lib_rendersettings import RenderSettings
+from ayon_core.pipeline.publish import (
+    ValidateContentsOrder,
+    PublishValidationError,
+    OptionalPyblishPluginMixin
+)
+
+
+class ValidateRenderSingleCamera(pyblish.api.InstancePlugin,
+                                 OptionalPyblishPluginMixin):
+    """Validate renderable camera count for layer and <Camera> token.
+
+    The pipeline supports multiple renderable cameras per layer, but then
+    the image prefix must contain the <Camera> token.
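+
+    For example, a prefix like the following renders every renderable
+    camera to its own image sequence (illustrative):
+
+        <Scene>/<RenderLayer>/<RenderLayer>_<Camera>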
+ """ + + order = ValidateContentsOrder + label = "Render Single Camera" + hosts = ['maya'] + families = ["renderlayer", + "vrayscene"] + actions = [ayon_maya.api.action.SelectInvalidAction] + optional = False + + R_CAMERA_TOKEN = re.compile(r'%c|', re.IGNORECASE) + + def process(self, instance): + """Process all the cameras in the instance""" + if not self.is_active(instance.data): + return + invalid = self.get_invalid(instance) + if invalid: + raise PublishValidationError( + "Invalid render cameras.", + description=self.get_description() + ) + + @classmethod + def get_invalid(cls, instance): + + cameras = instance.data.get("cameras", []) + renderer = cmds.getAttr('defaultRenderGlobals.currentRenderer').lower() + # handle various renderman names + if renderer.startswith('renderman'): + renderer = 'renderman' + + file_prefix = cmds.getAttr( + RenderSettings.get_image_prefix_attr(renderer) + ) + + renderlayer = instance.data["renderlayer"] + if len(cameras) > 1: + if re.search(cls.R_CAMERA_TOKEN, file_prefix): + # if there is token in prefix and we have more then + # 1 camera, all is ok. + return + cls.log.error( + "Multiple renderable cameras found for %s: %s ", + renderlayer, ", ".join(cameras)) + return [renderlayer] + cameras + + elif len(cameras) < 1: + cls.log.error("No renderable cameras found for %s ", renderlayer) + return [renderlayer] + + def get_description(self): + return inspect.cleandoc( + """### Render Cameras Invalid + + Your render cameras are misconfigured. You may have no render + camera set or have multiple cameras with a render filename + prefix that does not include the `` token. + + See the logs for more details about the cameras. + + """ + ) diff --git a/client/ayon_maya/plugins/publish/validate_renderlayer_aovs.py b/client/ayon_maya/plugins/publish/validate_renderlayer_aovs.py new file mode 100644 index 00000000..92d97776 --- /dev/null +++ b/client/ayon_maya/plugins/publish/validate_renderlayer_aovs.py @@ -0,0 +1,65 @@ +import ayon_api +import pyblish.api + +import ayon_maya.api.action +from ayon_core.pipeline.publish import ( + PublishValidationError, + OptionalPyblishPluginMixin +) + +class ValidateRenderLayerAOVs(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): + """Validate created AOVs / RenderElement is registered in the database + + Each render element is registered as a product which is formatted based on + the render layer and the render element, example: + + . 
+
+    This translates to something like this:
+
+        CHAR.diffuse
+
+    This check is needed to ensure the render output is still complete
+
+    """
+
+    order = pyblish.api.ValidatorOrder + 0.1
+    label = "Render Passes / AOVs Are Registered"
+    hosts = ["maya"]
+    families = ["renderlayer"]
+    actions = [ayon_maya.api.action.SelectInvalidAction]
+    optional = False
+
+    def process(self, instance):
+        if not self.is_active(instance.data):
+            return
+
+        invalid = self.get_invalid(instance)
+        if invalid:
+            raise PublishValidationError(
+                "Found unregistered products: {}".format(invalid))
+
+    def get_invalid(self, instance):
+        invalid = []
+
+        project_name = instance.context.data["projectName"]
+        folder_entity = instance.data["folderEntity"]
+        render_passes = instance.data.get("renderPasses", [])
+        for render_pass in render_passes:
+            is_valid = self.validate_product_registered(
+                project_name, folder_entity, render_pass
+            )
+            if not is_valid:
+                invalid.append(render_pass)
+
+        return invalid
+
+    def validate_product_registered(
+        self, project_name, folder_entity, product_name
+    ):
+        """Check if product is registered in the database under the folder"""
+
+        return ayon_api.get_product_by_name(
+            project_name, product_name, folder_entity["id"], fields={"id"}
+        )
diff --git a/client/ayon_maya/plugins/publish/validate_rendersettings.py b/client/ayon_maya/plugins/publish/validate_rendersettings.py
new file mode 100644
index 00000000..d5a9ea77
--- /dev/null
+++ b/client/ayon_maya/plugins/publish/validate_rendersettings.py
@@ -0,0 +1,447 @@
+# -*- coding: utf-8 -*-
+"""Maya validator for render settings."""
+import re
+from collections import OrderedDict
+
+from maya import cmds, mel
+
+import pyblish.api
+from ayon_core.pipeline.publish import (
+    RepairAction,
+    ValidateContentsOrder,
+    PublishValidationError,
+    OptionalPyblishPluginMixin
+)
+from ayon_maya.api import lib
+from ayon_maya.api.lib_rendersettings import RenderSettings
+
+
+def convert_to_int_or_float(string_value):
+    # Order of types is important here since float can convert a string
+    # representation of an integer.
+    types = [int, float]
+    for t in types:
+        try:
+            result = t(string_value)
+        except ValueError:
+            continue
+        else:
+            return result
+
+    # Neither integer nor float.
+    return string_value
+
+
+def get_redshift_image_format_labels():
+    """Return nice labels for Redshift image formats."""
+    var = "$g_redshiftImageFormatLabels"
+    return mel.eval("{0}={0}".format(var))
+
+
+class ValidateRenderSettings(pyblish.api.InstancePlugin,
+                             OptionalPyblishPluginMixin):
+    """Validates the global render settings
+
+    * File Name Prefix must start with: `<Scene>`
+        all other tokens are customizable, but sane values for Arnold are:
+
+        `<Scene>/<RenderLayer>/<RenderLayer>_<RenderPass>`
+
+        The <Camera> token is supported also, useful for multiple renderable
+        cameras per render layer.
+
+        For Redshift omit the <RenderPass> token. Redshift will append it
+        automatically if AOVs are enabled, and if you use Multipart EXR
+        it doesn't make much sense.
+
+    * Frame Padding must be:
+        * default: 4
+
+    * Animation must be toggled on, in Render Settings - Common tab:
+        * vray: Animation on standard or specific
+        * arnold: Frame / Animation ext: Any choice without "(Single Frame)"
+        * redshift: Animation toggled on
+
+    NOTE:
+        The repair function of this plugin does not repair the animation
+        setting of the render settings due to multiple possibilities.
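+
+    For example, enabling animation and the expected frame padding via
+    script is a sketch like this (the global render settings attributes):
+
+        from maya import cmds
+        cmds.setAttr("defaultRenderGlobals.animation", 1)
+        cmds.setAttr("defaultRenderGlobals.extensionPadding", 4)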
+ + """ + + order = ValidateContentsOrder + label = "Validate Render Settings" + hosts = ["maya"] + families = ["renderlayer"] + actions = [RepairAction] + optional = True + + ImagePrefixes = { + 'mentalray': 'defaultRenderGlobals.imageFilePrefix', + 'vray': 'vraySettings.fileNamePrefix', + 'arnold': 'defaultRenderGlobals.imageFilePrefix', + 'renderman': 'rmanGlobals.imageFileFormat', + 'redshift': 'defaultRenderGlobals.imageFilePrefix', + 'mayahardware2': 'defaultRenderGlobals.imageFilePrefix', + } + + ImagePrefixTokens = { + 'mentalray': '//{aov_separator}', # noqa: E501 + 'arnold': '//{aov_separator}', # noqa: E501 + 'redshift': '//', + 'vray': '//', + 'renderman': '{aov_separator}..', + 'mayahardware2': '//', + } + + _aov_chars = { + "dot": ".", + "dash": "-", + "underscore": "_" + } + + redshift_AOV_prefix = "/{aov_separator}" # noqa: E501 + + renderman_dir_prefix = "/" + + R_AOV_TOKEN = re.compile( + r'%a||', re.IGNORECASE) + R_LAYER_TOKEN = re.compile( + r'%l||', re.IGNORECASE) + R_CAMERA_TOKEN = re.compile(r'%c|Camera>') + R_SCENE_TOKEN = re.compile(r'%s|', re.IGNORECASE) + + DEFAULT_PADDING = 4 + VRAY_PREFIX = "//" + DEFAULT_PREFIX = "//_" + + def process(self, instance): + if not self.is_active(instance.data): + return + + invalid = self.get_invalid(instance) + if invalid: + raise PublishValidationError( + title="Invalid Render Settings", + message=("Invalid render settings found " + "for '{}'!".format(instance.name)) + ) + + @classmethod + def get_invalid(cls, instance): + + invalid = False + + renderer = instance.data['renderer'] + layer = instance.data['renderlayer'] + cameras = instance.data.get("cameras", []) + + # Prefix attribute can return None when a value was never set + prefix = lib.get_attr_in_layer(cls.ImagePrefixes[renderer], + layer=layer) or "" + padding = lib.get_attr_in_layer( + attr=RenderSettings.get_padding_attr(renderer), + layer=layer + ) + + anim_override = lib.get_attr_in_layer("defaultRenderGlobals.animation", + layer=layer) + + prefix = prefix.replace( + "{aov_separator}", instance.data.get("aovSeparator", "_")) + + default_prefix = cls.ImagePrefixTokens[renderer] + + if not anim_override: + invalid = True + cls.log.error("Animation needs to be enabled. 
Use the same " + "frame for start and end to render single frame") + + if not re.search(cls.R_LAYER_TOKEN, prefix): + invalid = True + cls.log.error("Wrong image prefix [ {} ] - " + "doesn't have: '' or " + "'' token".format(prefix)) + + if len(cameras) > 1 and not re.search(cls.R_CAMERA_TOKEN, prefix): + invalid = True + cls.log.error("Wrong image prefix [ {} ] - " + "doesn't have: '' token".format(prefix)) + cls.log.error( + "Note that to needs to have capital 'C' at the beginning") + + # renderer specific checks + if renderer == "vray": + vray_settings = cmds.ls(type="VRaySettingsNode") + if not vray_settings: + node = cmds.createNode("VRaySettingsNode") + else: + node = vray_settings[0] + + scene_sep = cmds.getAttr( + "{}.fileNameRenderElementSeparator".format(node)) + if scene_sep != instance.data.get("aovSeparator", "_"): + cls.log.error("AOV separator is not set correctly.") + invalid = True + + if renderer == "redshift": + redshift_AOV_prefix = cls.redshift_AOV_prefix.replace( + "{aov_separator}", instance.data.get("aovSeparator", "_") + ) + if re.search(cls.R_AOV_TOKEN, prefix): + invalid = True + cls.log.error(("Do not use AOV token [ {} ] - " + "Redshift is using image prefixes per AOV so " + "it doesn't make much sense using it in global" + "image prefix").format(prefix)) + # get redshift AOVs + rs_aovs = cmds.ls(type="RedshiftAOV", referencedNodes=False) + for aov in rs_aovs: + aov_prefix = cmds.getAttr("{}.filePrefix".format(aov)) + # check their image prefix + if aov_prefix != redshift_AOV_prefix: + cls.log.error(("AOV ({}) image prefix is not set " + "correctly {} != {}").format( + cmds.getAttr("{}.name".format(aov)), + aov_prefix, + redshift_AOV_prefix + )) + invalid = True + + # check aov file format + aov_ext = cmds.getAttr("{}.fileFormat".format(aov)) + default_ext = cmds.getAttr("redshiftOptions.imageFormat") + aov_type = cmds.getAttr("{}.aovType".format(aov)) + if aov_type == "Cryptomatte": + # redshift Cryptomatte AOV always uses "Cryptomatte (EXR)" + # so we ignore validating file format for it. 
+ pass + + elif default_ext != aov_ext: + labels = get_redshift_image_format_labels() + cls.log.error( + "AOV file format {} does not match global file format " + "{}".format(labels[aov_ext], labels[default_ext]) + ) + invalid = True + + if renderer == "renderman": + file_prefix = cmds.getAttr("rmanGlobals.imageFileFormat") + dir_prefix = cmds.getAttr("rmanGlobals.imageOutputDir") + + if file_prefix.lower() != prefix.lower(): + invalid = True + cls.log.error("Wrong image prefix [ {} ]".format(file_prefix)) + + if dir_prefix.lower() != cls.renderman_dir_prefix.lower(): + invalid = True + cls.log.error("Wrong directory prefix [ {} ]".format( + dir_prefix)) + + if renderer == "arnold": + multipart = cmds.getAttr("defaultArnoldDriver.mergeAOVs") + if multipart: + if re.search(cls.R_AOV_TOKEN, prefix): + invalid = True + cls.log.error("Wrong image prefix [ {} ] - " + "You can't use '' token " + "with merge AOVs turned on".format(prefix)) + default_prefix = re.sub( + cls.R_AOV_TOKEN, "", default_prefix) + # remove aov token from prefix to pass validation + default_prefix = default_prefix.split("{aov_separator}")[0] + elif not re.search(cls.R_AOV_TOKEN, prefix): + invalid = True + cls.log.error("Wrong image prefix [ {} ] - " + "doesn't have: '' or " + "token".format(prefix)) + + default_prefix = default_prefix.replace( + "{aov_separator}", instance.data.get("aovSeparator", "_")) + if prefix.lower() != default_prefix.lower(): + cls.log.warning("warning: prefix differs from " + "recommended {}".format( + default_prefix)) + + if padding != cls.DEFAULT_PADDING: + invalid = True + cls.log.error("Expecting padding of {} ( {} )".format( + cls.DEFAULT_PADDING, "0" * cls.DEFAULT_PADDING)) + + # load validation definitions from settings + settings_lights_flag = instance.context.data["project_settings"].get( + "maya", {}).get( + "render_settings", {}).get( + "enable_all_lights", False) + + instance_lights_flag = instance.data.get("renderSetupIncludeLights") + if settings_lights_flag != instance_lights_flag: + cls.log.warning( + "Instance flag for \"Render Setup Include Lights\" is set to " + "{} and Settings flag is set to {}".format( + instance_lights_flag, settings_lights_flag + ) + ) + + # go through definitions and test if such node.attribute exists. + # if so, compare its value from the one required. + for data in cls.get_nodes(instance, renderer): + for node in data["nodes"]: + # Why is captured 'PublishValidationError'? How it can be + # raised by 'cmds.getAttr(...)'? + try: + render_value = cmds.getAttr( + "{}.{}".format(node, data["attribute"]) + ) + except PublishValidationError: + invalid = True + cls.log.error( + "Cannot get value of {}.{}".format( + node, data["attribute"] + ) + ) + else: + if render_value not in data["values"]: + invalid = True + cls.log.error( + "Invalid value {} set on {}.{}. Expecting " + "{}".format( + render_value, + node, + data["attribute"], + data["values"] + ) + ) + + return invalid + + @classmethod + def get_nodes(cls, instance, renderer): + maya_settings = instance.context.data["project_settings"]["maya"] + renderer_key = "{}_render_attributes".format(renderer) + validation_settings = ( + maya_settings["publish"]["ValidateRenderSettings"].get( + renderer_key + ) + ) or [] + validation_settings = [ + (item["type"], item["value"]) + for item in validation_settings + ] + result = [] + for attr, values in OrderedDict(validation_settings).items(): + values = [convert_to_int_or_float(v) for v in values if v] + + # Validate the settings has values. 
+ if not values: + cls.log.error( + "Settings for {} is missing values.".format(attr) + ) + continue + + cls.log.debug("{}: {}".format(attr, values)) + if "." not in attr: + cls.log.warning( + "Skipping invalid attribute defined in validation " + "settings: \"{}\"".format(attr) + ) + continue + + node_type, attribute_name = attr.split(".", 1) + + # first get node of that type + nodes = cmds.ls(type=node_type) + + if not nodes: + cls.log.warning( + "No nodes of type \"{}\" found.".format(node_type) + ) + continue + + result.append( + { + "attribute": attribute_name, + "nodes": nodes, + "values": values + } + ) + + return result + + @classmethod + def repair(cls, instance): + renderer = instance.data['renderer'] + layer_node = instance.data['setMembers'] + redshift_AOV_prefix = cls.redshift_AOV_prefix.replace( + "{aov_separator}", instance.data.get("aovSeparator", "_") + ) + default_prefix = cls.ImagePrefixTokens[renderer].replace( + "{aov_separator}", instance.data.get("aovSeparator", "_") + ) + + for data in cls.get_nodes(instance, renderer): + if not data["values"]: + continue + for node in data["nodes"]: + lib.set_attribute(data["attribute"], data["values"][0], node) + with lib.renderlayer(layer_node): + + # Repair animation must be enabled + cmds.setAttr("defaultRenderGlobals.animation", True) + + # Repair prefix + if renderer == "arnold": + multipart = cmds.getAttr("defaultArnoldDriver.mergeAOVs") + if multipart: + separator_variations = [ + "_", + "_", + "", + ] + for variant in separator_variations: + default_prefix = default_prefix.replace(variant, "") + + if renderer != "renderman": + prefix_attr = RenderSettings.get_image_prefix_attr(renderer) + fname_prefix = default_prefix + cmds.setAttr(prefix_attr, fname_prefix, type="string") + + # Repair padding + padding_attr = RenderSettings.get_padding_attr(renderer) + cmds.setAttr(padding_attr, cls.DEFAULT_PADDING) + else: + # renderman handles stuff differently + cmds.setAttr("rmanGlobals.imageFileFormat", + default_prefix, + type="string") + cmds.setAttr("rmanGlobals.imageOutputDir", + cls.renderman_dir_prefix, + type="string") + + if renderer == "vray": + vray_settings = cmds.ls(type="VRaySettingsNode") + if not vray_settings: + node = cmds.createNode("VRaySettingsNode") + else: + node = vray_settings[0] + + cmds.optionMenuGrp("vrayRenderElementSeparator", + v=instance.data.get("aovSeparator", "_")) + cmds.setAttr( + "{}.fileNameRenderElementSeparator".format(node), + instance.data.get("aovSeparator", "_"), + type="string" + ) + + if renderer == "redshift": + # get redshift AOVs + rs_aovs = cmds.ls(type="RedshiftAOV", referencedNodes=False) + for aov in rs_aovs: + # fix AOV prefixes + cmds.setAttr( + "{}.filePrefix".format(aov), + redshift_AOV_prefix, type="string") + # fix AOV file format + default_ext = cmds.getAttr( + "redshiftOptions.imageFormat", asString=True) + cmds.setAttr( + "{}.fileFormat".format(aov), default_ext) diff --git a/client/ayon_maya/plugins/publish/validate_resolution.py b/client/ayon_maya/plugins/publish/validate_resolution.py new file mode 100644 index 00000000..aae3940e --- /dev/null +++ b/client/ayon_maya/plugins/publish/validate_resolution.py @@ -0,0 +1,107 @@ +import pyblish.api +from ayon_core.pipeline import ( + PublishValidationError, + OptionalPyblishPluginMixin +) +from maya import cmds +from ayon_core.pipeline.publish import RepairAction +from ayon_maya.api import lib +from ayon_maya.api.lib import reset_scene_resolution + + +class ValidateResolution(pyblish.api.InstancePlugin, + 
OptionalPyblishPluginMixin): + """Validate the render resolution setting aligned with DB""" + + order = pyblish.api.ValidatorOrder + families = ["renderlayer"] + hosts = ["maya"] + label = "Validate Resolution" + actions = [RepairAction] + optional = True + + def process(self, instance): + if not self.is_active(instance.data): + return + invalid = self.get_invalid_resolution(instance) + if invalid: + raise PublishValidationError( + "Render resolution is invalid. See log for details.", + description=( + "Wrong render resolution setting. " + "Please use repair button to fix it.\n\n" + "If current renderer is V-Ray, " + "make sure vraySettings node has been created." + ) + ) + + @classmethod + def get_invalid_resolution(cls, instance): + width, height, pixelAspect = cls.get_folder_resolution(instance) + current_renderer = instance.data["renderer"] + layer = instance.data["renderlayer"] + invalid = False + if current_renderer == "vray": + vray_node = "vraySettings" + if cmds.objExists(vray_node): + current_width = lib.get_attr_in_layer( + "{}.width".format(vray_node), layer=layer) + current_height = lib.get_attr_in_layer( + "{}.height".format(vray_node), layer=layer) + current_pixelAspect = lib.get_attr_in_layer( + "{}.pixelAspect".format(vray_node), layer=layer + ) + else: + cls.log.error( + "Can't detect VRay resolution because there is no node " + "named: `{}`".format(vray_node) + ) + return True + else: + current_width = lib.get_attr_in_layer( + "defaultResolution.width", layer=layer) + current_height = lib.get_attr_in_layer( + "defaultResolution.height", layer=layer) + current_pixelAspect = lib.get_attr_in_layer( + "defaultResolution.pixelAspect", layer=layer + ) + if current_width != width or current_height != height: + cls.log.error( + "Render resolution {}x{} does not match " + "folder resolution {}x{}".format( + current_width, current_height, + width, height + )) + invalid = True + if current_pixelAspect != pixelAspect: + cls.log.error( + "Render pixel aspect {} does not match " + "folder pixel aspect {}".format( + current_pixelAspect, pixelAspect + )) + invalid = True + return invalid + + @classmethod + def get_folder_resolution(cls, instance): + task_attributes = instance.data["taskEntity"]["attrib"] + width = task_attributes["resolutionWidth"] + height = task_attributes["resolutionHeight"] + pixel_aspect = task_attributes["pixelAspect"] + return int(width), int(height), float(pixel_aspect) + + @classmethod + def repair(cls, instance): + # Usually without renderlayer overrides the renderlayers + # all share the same resolution value - so fixing the first + # will have fixed all the others too. It's much faster to + # check whether it's invalid first instead of switching + # into all layers individually + if not cls.get_invalid_resolution(instance): + cls.log.debug( + "Nothing to repair on instance: {}".format(instance) + ) + return + layer_node = instance.data['setMembers'] + with lib.renderlayer(layer_node): + reset_scene_resolution() diff --git a/client/ayon_maya/plugins/publish/validate_resources.py b/client/ayon_maya/plugins/publish/validate_resources.py new file mode 100644 index 00000000..725e8645 --- /dev/null +++ b/client/ayon_maya/plugins/publish/validate_resources.py @@ -0,0 +1,60 @@ +import os +from collections import defaultdict + +import pyblish.api +from ayon_core.pipeline.publish import ( + ValidateContentsOrder, + PublishValidationError +) + + +class ValidateResources(pyblish.api.InstancePlugin): + """Validates mapped resources. 
+
+    These are external files to the current application, for example
+    these could be textures, image planes, cache files or other linked
+    media.
+
+    This validates:
+        - The resources have unique filenames (without extension)
+
+    """
+
+    order = ValidateContentsOrder
+    label = "Resources Unique"
+
+    def process(self, instance):
+
+        resources = instance.data.get("resources", [])
+        if not resources:
+            self.log.debug("No resources to validate..")
+            return
+
+        basenames = defaultdict(set)
+
+        for resource in resources:
+            files = resource.get("files", [])
+            for filename in files:
+
+                # Use normalized paths in comparison and ignore case
+                # sensitivity
+                filename = os.path.normpath(filename).lower()
+
+                basename = os.path.splitext(os.path.basename(filename))[0]
+                basenames[basename].add(filename)
+
+        invalid_resources = list()
+        for basename, sources in basenames.items():
+            if len(sources) > 1:
+                invalid_resources.extend(sources)
+
+                self.log.error(
+                    "Non-unique resource name: {0} (sources: {1})".format(
+                        basename,
+                        list(sources)
+                    )
+                )
+
+        if invalid_resources:
+            raise PublishValidationError("Invalid resources in instance.")
diff --git a/client/ayon_maya/plugins/publish/validate_review.py b/client/ayon_maya/plugins/publish/validate_review.py
new file mode 100644
index 00000000..fcfd8513
--- /dev/null
+++ b/client/ayon_maya/plugins/publish/validate_review.py
@@ -0,0 +1,30 @@
+import pyblish.api
+
+from ayon_core.pipeline.publish import (
+    ValidateContentsOrder, PublishValidationError
+)
+
+
+class ValidateReview(pyblish.api.InstancePlugin):
+    """Validate review."""
+
+    order = ValidateContentsOrder
+    label = "Validate Review"
+    families = ["review"]
+
+    def process(self, instance):
+        cameras = instance.data["cameras"]
+
+        # validate required settings
+        if len(cameras) == 0:
+            raise PublishValidationError(
+                "No camera found in review instance: {}".format(instance)
+            )
+        elif len(cameras) > 1:
+            raise PublishValidationError(
+                "Only a single camera is allowed for a review instance but "
+                "more than one camera found in review instance: {}. "
+                "Cameras found: {}".format(instance, ", ".join(cameras))
+            )
+
+        self.log.debug('camera: {}'.format(instance.data["review_camera"]))
diff --git a/client/ayon_maya/plugins/publish/validate_rig_contents.py b/client/ayon_maya/plugins/publish/validate_rig_contents.py
new file mode 100644
index 00000000..b8b87288
--- /dev/null
+++ b/client/ayon_maya/plugins/publish/validate_rig_contents.py
@@ -0,0 +1,260 @@
+import pyblish.api
+from maya import cmds
+import ayon_maya.api.action
+from ayon_core.pipeline.publish import (
+    PublishValidationError,
+    ValidateContentsOrder,
+    OptionalPyblishPluginMixin
+)
+
+
+class ValidateRigContents(pyblish.api.InstancePlugin,
+                          OptionalPyblishPluginMixin):
+    """Ensure rig contains pipeline-critical content
+
+    Every rig must contain at least two object sets:
+        "controls_SET" - Set of all animatable controls
+        "out_SET" - Set of all cacheable meshes
+
+    """
+
+    order = ValidateContentsOrder
+    label = "Rig Contents"
+    hosts = ["maya"]
+    families = ["rig"]
+    actions = [ayon_maya.api.action.SelectInvalidAction]
+    optional = True
+
+    accepted_output = ["mesh", "transform"]
+    accepted_controllers = ["transform"]
+
+    def process(self, instance):
+        if not self.is_active(instance.data):
+            return
+        invalid = self.get_invalid(instance)
+        if invalid:
+            raise PublishValidationError(
+                "Invalid rig content. See log for details.")
+
+    @classmethod
+    def get_invalid(cls, instance):
+
+        # Find required sets by suffix
+        required, rig_sets = cls.get_nodes(instance)
+
+        cls.validate_missing_objectsets(instance, required, rig_sets)
+
+        controls_set = rig_sets["controls_SET"]
+        out_set = rig_sets["out_SET"]
+
+        # Ensure contents in sets and retrieve long path for all objects
+        output_content = cmds.sets(out_set, query=True) or []
+        if not output_content:
+            raise PublishValidationError("Must have members in rig out_SET")
+        output_content = cmds.ls(output_content, long=True)
+
+        controls_content = cmds.sets(controls_set, query=True) or []
+        if not controls_content:
+            raise PublishValidationError(
+                "Must have members in rig controls_SET"
+            )
+        controls_content = cmds.ls(controls_content, long=True)
+
+        rig_content = output_content + controls_content
+        invalid_hierarchy = cls.invalid_hierarchy(instance, rig_content)
+
+        # Additional validations
+        invalid_geometry = cls.validate_geometry(output_content)
+        invalid_controls = cls.validate_controls(controls_content)
+
+        error = False
+        if invalid_hierarchy:
+            cls.log.error("Found nodes which reside outside of root group "
+                          "while they are set up for publishing."
+                          "\n%s" % invalid_hierarchy)
+            error = True
+
+        if invalid_controls:
+            cls.log.error("Only transforms can be part of the controls_SET."
+                          "\n%s" % invalid_controls)
+            error = True
+
+        if invalid_geometry:
+            cls.log.error("Only meshes can be part of the out_SET\n%s"
+                          % invalid_geometry)
+            error = True
+        if error:
+            return invalid_hierarchy + invalid_controls + invalid_geometry
+
+    @classmethod
+    def validate_missing_objectsets(cls, instance,
+                                    required_objsets, rig_sets):
+        """Validate missing object sets in the rig sets
+
+        Args:
+            instance (pyblish.api.Instance): instance
+            required_objsets (list[str]): list of required object set names
+            rig_sets (dict[str, str]): rig sets by name
+
+        Raises:
+            PublishValidationError: Raised when the instance is missing any
+                of the required object sets; the message lists which ones.
+        """
+        missing = [
+            key for key in required_objsets if key not in rig_sets
+        ]
+        if missing:
+            raise PublishValidationError(
+                "%s is missing sets: %s" % (instance, ", ".join(missing))
+            )
+
+    @classmethod
+    def invalid_hierarchy(cls, instance, content):
+        """
+        Check if all rig set members are within the hierarchy of the rig root
+
+        Args:
+            instance (pyblish.api.Instance): instance
+            content (list[str]): list of content from rig sets
+
+        Raises:
+            PublishValidationError: Raised when the rig instance contains
+                no dag nodes at all (an empty instance).
+
+        Returns:
+            list[str]: invalid hierarchy
+        """
+        # Ensure there are at least some transforms or dag nodes
+        # in the rig instance
+        set_members = instance.data['setMembers']
+        if not cmds.ls(set_members, type="dagNode", long=True):
+            raise PublishValidationError(
+                "No dag nodes in the rig instance. "
+                "(Empty instance?)"
+            )
+        # Validate members are inside the hierarchy from root node
+        root_nodes = cmds.ls(set_members, assemblies=True, long=True)
+        hierarchy = cmds.listRelatives(root_nodes, allDescendents=True,
+                                       fullPath=True) + root_nodes
+        hierarchy = set(hierarchy)
+        invalid_hierarchy = []
+        for node in content:
+            if node not in hierarchy:
+                invalid_hierarchy.append(node)
+        return invalid_hierarchy
+
+    @classmethod
+    def validate_geometry(cls, set_members):
+        """Check if the node types of the set members are valid
+
+        Args:
+            set_members (list[str]): nodes of the out_SET
+
+        Returns:
+            list[str]: Nodes of invalid types.
+        """
+
+        # Validate all shape types
+        invalid = []
+        shapes = cmds.listRelatives(set_members,
+                                    allDescendents=True,
+                                    shapes=True,
+                                    fullPath=True) or []
+        all_shapes = cmds.ls(set_members + shapes, long=True, shapes=True)
+        for shape in all_shapes:
+            if cmds.nodeType(shape) not in cls.accepted_output:
+                invalid.append(shape)
+
+        return invalid
+
+    @classmethod
+    def validate_controls(cls, set_members):
+        """Check if the node types of the set members are valid for controls.
+
+        Args:
+            set_members (list[str]): list of nodes of the controls_SET
+
+        Returns:
+            list: Controls of disallowed node types.
+        """
+
+        # Validate control types
+        invalid = []
+        for node in set_members:
+            if cmds.nodeType(node) not in cls.accepted_controllers:
+                invalid.append(node)
+
+        return invalid
+
+    @classmethod
+    def get_nodes(cls, instance):
+        """Get the required object set names and the rig sets
+
+        Args:
+            instance (pyblish.api.Instance): instance
+
+        Returns:
+            tuple: 2-tuple of list of required object set names,
+                rig sets by name
+        """
+        objectsets = ["controls_SET", "out_SET"]
+        rig_sets_nodes = instance.data.get("rig_sets", [])
+        return objectsets, rig_sets_nodes
+
+
+class ValidateSkeletonRigContents(ValidateRigContents):
+    """Ensure skeleton rigs contain pipeline-critical content
+
+    The rigs optionally contain the following object set:
+        "skeletonMesh_SET" - Set of the skinned meshes
+                             with bone hierarchies
+
+    """
+
+    order = ValidateContentsOrder
+    label = "Skeleton Rig Contents"
+    hosts = ["maya"]
+    families = ["rig.fbx"]
+    optional = True
+
+    @classmethod
+    def get_invalid(cls, instance):
+        objectsets, skeleton_mesh_nodes = cls.get_nodes(instance)
+        cls.validate_missing_objectsets(
+            instance, objectsets, instance.data["rig_sets"])
+
+        # Ensure contents in sets and retrieve long path for all objects
+        output_content = cmds.ls(skeleton_mesh_nodes, long=True)
+
+        invalid_hierarchy = cls.invalid_hierarchy(
+            instance, output_content)
+        invalid_geometry = cls.validate_geometry(output_content)
+
+        error = False
+        if invalid_hierarchy:
+            cls.log.error("Found nodes which reside outside of root group "
+                          "while they are set up for publishing."
+                          "\n%s" % invalid_hierarchy)
+            error = True
+        if invalid_geometry:
+            cls.log.error("Found nodes of unsupported types in the "
+                          "skeletonMesh_SET.\n%s" % invalid_geometry)
+            error = True
+        if error:
+            return invalid_hierarchy + invalid_geometry
+
+    @classmethod
+    def get_nodes(cls, instance):
+        """Get the required object set names and the skeleton mesh nodes
+
+        Args:
+            instance (pyblish.api.Instance): instance
+
+        Returns:
+            tuple: 2-tuple of list of required object set names,
+                list of skeleton mesh nodes
+        """
+        objectsets = ["skeletonMesh_SET"]
+        skeleton_mesh_nodes = instance.data.get("skeleton_mesh", [])
+        return objectsets, skeleton_mesh_nodes
diff --git a/client/ayon_maya/plugins/publish/validate_rig_controllers.py b/client/ayon_maya/plugins/publish/validate_rig_controllers.py
new file mode 100644
index 00000000..aed08001
--- /dev/null
+++ b/client/ayon_maya/plugins/publish/validate_rig_controllers.py
@@ -0,0 +1,297 @@
+from maya import cmds
+
+import pyblish.api
+
+from ayon_core.pipeline.publish import (
+    ValidateContentsOrder,
+    RepairAction,
+    PublishValidationError,
+    OptionalPyblishPluginMixin
+)
+import ayon_maya.api.action
+from ayon_maya.api.lib import undo_chunk
+
+
+class ValidateRigControllers(pyblish.api.InstancePlugin,
+                             OptionalPyblishPluginMixin):
+    """Validate rig controllers.
+ + Controls must have the transformation attributes on their default + values of translate zero, rotate zero and scale one when they are + unlocked attributes. + + Unlocked keyable attributes may not have any incoming connections. If + these connections are required for the rig then lock the attributes. + + The visibility attribute must be locked. + + Note that `repair` will: + - Lock all visibility attributes + - Reset all default values for translate, rotate, scale + - Break all incoming connections to keyable attributes + + """ + order = ValidateContentsOrder + 0.05 + label = "Rig Controllers" + hosts = ["maya"] + families = ["rig"] + optional = True + actions = [RepairAction, + ayon_maya.api.action.SelectInvalidAction] + + # Default controller values + CONTROLLER_DEFAULTS = { + "translateX": 0, + "translateY": 0, + "translateZ": 0, + "rotateX": 0, + "rotateY": 0, + "rotateZ": 0, + "scaleX": 1, + "scaleY": 1, + "scaleZ": 1 + } + + def process(self, instance): + if not self.is_active(instance.data): + return + + invalid = self.get_invalid(instance) + if invalid: + raise PublishValidationError( + '{} failed, see log information'.format(self.label) + ) + + @classmethod + def get_invalid(cls, instance): + + controls_set = cls.get_node(instance) + if not controls_set: + cls.log.error( + "Must have 'controls_SET' in rig instance" + ) + return [instance.data["instance_node"]] + + controls = cmds.sets(controls_set, query=True) + + # Ensure all controls are within the top group + lookup = set(instance[:]) + if not all(control in lookup for control in cmds.ls(controls, + long=True)): + cls.log.error( + "All controls must be inside the rig's group." + ) + return [controls_set] + + # Validate all controls + has_connections = list() + has_unlocked_visibility = list() + has_non_default_values = list() + for control in controls: + if cls.get_connected_attributes(control): + has_connections.append(control) + + # check if visibility is locked + attribute = "{}.visibility".format(control) + locked = cmds.getAttr(attribute, lock=True) + if not locked: + has_unlocked_visibility.append(control) + + if cls.get_non_default_attributes(control): + has_non_default_values.append(control) + + if has_connections: + cls.log.error("Controls have input connections: " + "%s" % has_connections) + + if has_non_default_values: + cls.log.error("Controls have non-default values: " + "%s" % has_non_default_values) + + if has_unlocked_visibility: + cls.log.error("Controls have unlocked visibility " + "attribute: %s" % has_unlocked_visibility) + + invalid = [] + if (has_connections or + has_unlocked_visibility or + has_non_default_values): + invalid = set() + invalid.update(has_connections) + invalid.update(has_non_default_values) + invalid.update(has_unlocked_visibility) + invalid = list(invalid) + cls.log.error("Invalid rig controllers. See log for details.") + + return invalid + + @classmethod + def get_non_default_attributes(cls, control): + """Return attribute plugs with non-default values + + Args: + control (str): Name of control node. 
+ + Returns: + list: The invalid plugs + + """ + + invalid = [] + for attr, default in cls.CONTROLLER_DEFAULTS.items(): + if cmds.attributeQuery(attr, node=control, exists=True): + plug = "{}.{}".format(control, attr) + + # Ignore locked attributes + locked = cmds.getAttr(plug, lock=True) + if locked: + continue + + value = cmds.getAttr(plug) + if value != default: + cls.log.warning("Control non-default value: " + "%s = %s" % (plug, value)) + invalid.append(plug) + + return invalid + + @staticmethod + def get_connected_attributes(control): + """Return attribute plugs with incoming connections. + + This will also ensure no (driven) keys on unlocked keyable attributes. + + Args: + control (str): Name of control node. + + Returns: + list: The invalid plugs + + """ + import maya.cmds as mc + + # Support controls without any attributes returning None + attributes = mc.listAttr(control, keyable=True, scalar=True) or [] + invalid = [] + for attr in attributes: + plug = "{}.{}".format(control, attr) + + # Ignore locked attributes + locked = cmds.getAttr(plug, lock=True) + if locked: + continue + + # Ignore proxy connections. + if (cmds.addAttr(plug, query=True, exists=True) and + cmds.addAttr(plug, query=True, usedAsProxy=True)): + continue + + # Check for incoming connections + if cmds.listConnections(plug, source=True, destination=False): + invalid.append(plug) + + return invalid + + @classmethod + def repair(cls, instance): + + controls_set = cls.get_node(instance) + if not controls_set: + cls.log.error( + "Unable to repair because no 'controls_SET' found in rig " + "instance: {}".format(instance) + ) + return + + # Use a single undo chunk + with undo_chunk(): + controls = cmds.sets(controls_set, query=True) + for control in controls: + + # Lock visibility + attr = "{}.visibility".format(control) + locked = cmds.getAttr(attr, lock=True) + if not locked: + cls.log.info("Locking visibility for %s" % control) + cmds.setAttr(attr, lock=True) + + # Remove incoming connections + invalid_plugs = cls.get_connected_attributes(control) + if invalid_plugs: + for plug in invalid_plugs: + cls.log.info("Breaking input connection to %s" % plug) + source = cmds.listConnections(plug, + source=True, + destination=False, + plugs=True)[0] + cmds.disconnectAttr(source, plug) + + # Reset non-default values + invalid_plugs = cls.get_non_default_attributes(control) + if invalid_plugs: + for plug in invalid_plugs: + attr = plug.split(".")[-1] + default = cls.CONTROLLER_DEFAULTS[attr] + cls.log.info("Setting %s to %s" % (plug, default)) + cmds.setAttr(plug, default) + + @classmethod + def get_node(cls, instance): + """Get target object nodes from controls_SET + + Args: + instance (str): instance + + Returns: + list: list of object nodes from controls_SET + """ + return instance.data["rig_sets"].get("controls_SET") + + +class ValidateSkeletonRigControllers(ValidateRigControllers): + """Validate rig controller for skeletonAnim_SET + + Controls must have the transformation attributes on their default + values of translate zero, rotate zero and scale one when they are + unlocked attributes. + + Unlocked keyable attributes may not have any incoming connections. If + these connections are required for the rig then lock the attributes. + + The visibility attribute must be locked. 
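+
+    For example, a locked visibility plug can be checked with this sketch
+    (the control name here is hypothetical):
+
+        from maya import cmds
+        cmds.getAttr("ctl_main.visibility", lock=True)  # True when locked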
+
+    Note that `repair` will:
+        - Lock all visibility attributes
+        - Reset all default values for translate, rotate, scale
+        - Break all incoming connections to keyable attributes
+
+    """
+    order = ValidateContentsOrder + 0.05
+    label = "Skeleton Rig Controllers"
+    hosts = ["maya"]
+    families = ["rig.fbx"]
+
+    # Default controller values
+    CONTROLLER_DEFAULTS = {
+        "translateX": 0,
+        "translateY": 0,
+        "translateZ": 0,
+        "rotateX": 0,
+        "rotateY": 0,
+        "rotateZ": 0,
+        "scaleX": 1,
+        "scaleY": 1,
+        "scaleZ": 1
+    }
+
+    @classmethod
+    def get_node(cls, instance):
+        """Get target object nodes from skeletonMesh_SET
+
+        Args:
+            instance (str): instance
+
+        Returns:
+            list: list of object nodes from skeletonMesh_SET
+        """
+        return instance.data["rig_sets"].get("skeletonMesh_SET")
diff --git a/client/ayon_maya/plugins/publish/validate_rig_controllers_arnold_attributes.py b/client/ayon_maya/plugins/publish/validate_rig_controllers_arnold_attributes.py
new file mode 100644
index 00000000..4bc47c59
--- /dev/null
+++ b/client/ayon_maya/plugins/publish/validate_rig_controllers_arnold_attributes.py
@@ -0,0 +1,102 @@
+from maya import cmds
+
+import pyblish.api
+
+from ayon_core.pipeline.publish import (
+    ValidateContentsOrder,
+    RepairAction,
+    PublishValidationError,
+    OptionalPyblishPluginMixin
+)
+
+from ayon_maya.api import lib
+import ayon_maya.api.action
+
+
+class ValidateRigControllersArnoldAttributes(pyblish.api.InstancePlugin,
+                                             OptionalPyblishPluginMixin):
+    """Validate rig control curves have no keyable Arnold attributes.
+
+    The Arnold plug-in will create curve attributes like:
+        - aiRenderCurve
+        - aiCurveWidth
+        - aiSampleRate
+        - aiCurveShaderR
+        - aiCurveShaderG
+        - aiCurveShaderB
+
+    Unfortunately these attributes are *keyable* by default and visible in
+    the channelBox. As such, pressing a regular "S" set-key shortcut will
+    set keys on these attributes too, thus cluttering the animator's scene.
+
+    This validator will ensure they are hidden or unkeyable attributes.
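+
+    For illustration, hiding one of these attributes by hand mirrors what
+    the repair action does (the shape name here is hypothetical):
+
+        from maya import cmds
+        plug = "curveShape1.aiRenderCurve"
+        cmds.setAttr(plug, channelBox=False, keyable=False)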
+ + """ + order = ValidateContentsOrder + 0.05 + label = "Rig Controllers (Arnold Attributes)" + hosts = ["maya"] + families = ["rig"] + optional = False + actions = [RepairAction, + ayon_maya.api.action.SelectInvalidAction] + + attributes = [ + "rcurve", + "cwdth", + "srate", + "ai_curve_shaderr", + "ai_curve_shaderg", + "ai_curve_shaderb" + ] + + def process(self, instance): + if not self.is_active(instance.data): + return + + invalid = self.get_invalid(instance) + if invalid: + raise PublishValidationError('{} failed, see log ' + 'information'.format(self.label)) + + @classmethod + def get_invalid(cls, instance): + + controls_set = instance.data["rig_sets"].get("controls_SET") + if not controls_set: + return [] + + controls = cmds.sets(controls_set, query=True) or [] + if not controls: + return [] + + shapes = cmds.ls(controls, + dag=True, + leaf=True, + long=True, + shapes=True, + noIntermediate=True) + curves = cmds.ls(shapes, type="nurbsCurve", long=True) + + invalid = list() + for node in curves: + + for attribute in cls.attributes: + if cmds.attributeQuery(attribute, node=node, exists=True): + plug = "{}.{}".format(node, attribute) + if cmds.getAttr(plug, keyable=True): + invalid.append(node) + break + + return invalid + + @classmethod + def repair(cls, instance): + + invalid = cls.get_invalid(instance) + with lib.undo_chunk(): + for node in invalid: + for attribute in cls.attributes: + if cmds.attributeQuery(attribute, node=node, exists=True): + plug = "{}.{}".format(node, attribute) + cmds.setAttr(plug, channelBox=False, keyable=False) diff --git a/client/ayon_maya/plugins/publish/validate_rig_joints_hidden.py b/client/ayon_maya/plugins/publish/validate_rig_joints_hidden.py new file mode 100644 index 00000000..d45af574 --- /dev/null +++ b/client/ayon_maya/plugins/publish/validate_rig_joints_hidden.py @@ -0,0 +1,53 @@ +from maya import cmds + +import pyblish.api + +import ayon_maya.api.action +from ayon_maya.api import lib +from ayon_core.pipeline.publish import ( + RepairAction, + ValidateContentsOrder, + PublishValidationError, + OptionalPyblishPluginMixin +) + + +class ValidateRigJointsHidden(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): + """Validate all joints are hidden visually. + + This includes being hidden: + - visibility off, + - in a display layer that has visibility off, + - having hidden parents or + - being an intermediate object. 
+ + """ + + order = ValidateContentsOrder + hosts = ['maya'] + families = ['rig'] + label = "Joints Hidden" + actions = [ayon_maya.api.action.SelectInvalidAction, + RepairAction] + optional = True + + @staticmethod + def get_invalid(instance): + joints = cmds.ls(instance, type='joint', long=True) + return [j for j in joints if lib.is_visible(j, displayLayer=True)] + + def process(self, instance): + """Process all the nodes in the instance 'objectSet'""" + if not self.is_active(instance.data): + return + invalid = self.get_invalid(instance) + + if invalid: + raise PublishValidationError( + "Visible joints found: {0}".format(invalid)) + + @classmethod + def repair(cls, instance): + import maya.mel as mel + mel.eval("HideJoints") diff --git a/client/ayon_maya/plugins/publish/validate_rig_out_set_node_ids.py b/client/ayon_maya/plugins/publish/validate_rig_out_set_node_ids.py new file mode 100644 index 00000000..e5404aae --- /dev/null +++ b/client/ayon_maya/plugins/publish/validate_rig_out_set_node_ids.py @@ -0,0 +1,162 @@ +import maya.cmds as cmds + +import pyblish.api + +import ayon_maya.api.action +from ayon_maya.api import lib +from ayon_core.pipeline.publish import ( + RepairAction, + ValidateContentsOrder, + PublishXmlValidationError, + OptionalPyblishPluginMixin, + get_plugin_settings, + apply_plugin_settings_automatically +) + + +class ValidateRigOutSetNodeIds(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): + """Validate if deformed shapes have related IDs to the original shapes. + + When a deformer is applied in the scene on a referenced mesh that already + had deformers then Maya will create a new shape node for the mesh that + does not have the original id. This validator checks whether the ids are + valid on all the shape nodes in the instance. 
+ + """ + + order = ValidateContentsOrder + families = ["rig"] + hosts = ['maya'] + label = 'Rig Out Set Node Ids' + actions = [ + ayon_maya.api.action.SelectInvalidAction, + RepairAction + ] + allow_history_only = False + optional = False + + @classmethod + def apply_settings(cls, project_settings): + # Preserve automatic settings applying logic + settings = get_plugin_settings(plugin=cls, + project_settings=project_settings, + log=cls.log, + category="maya") + apply_plugin_settings_automatically(cls, settings, logger=cls.log) + + # Disable plug-in if cbId workflow is disabled + if not project_settings["maya"].get("use_cbid_workflow", True): + cls.enabled = False + return + + def process(self, instance): + """Process all meshes""" + if not self.is_active(instance.data): + return + # Ensure all nodes have a cbId and a related ID to the original shapes + # if a deformer has been created on the shape + invalid = self.get_invalid(instance) + if invalid: + + # Use the short names + invalid = cmds.ls(invalid) + invalid.sort() + + # Construct a human-readable list + invalid = "\n".join("- {}".format(node) for node in invalid) + + raise PublishXmlValidationError( + plugin=ValidateRigOutSetNodeIds, + message=( + "Rig nodes have different IDs than their input " + "history: \n{0}".format(invalid) + ) + ) + + @classmethod + def get_invalid(cls, instance): + """Get all nodes which do not match the criteria""" + + out_set = cls.get_node(instance) + if not out_set: + return [] + + invalid = [] + members = cmds.sets(out_set, query=True) + shapes = cmds.ls(members, + dag=True, + leaf=True, + shapes=True, + long=True, + noIntermediate=True) + + for shape in shapes: + sibling_id = lib.get_id_from_sibling( + shape, + history_only=cls.allow_history_only + ) + if sibling_id: + current_id = lib.get_id(shape) + if current_id != sibling_id: + invalid.append(shape) + + return invalid + + @classmethod + def repair(cls, instance): + + for node in cls.get_invalid(instance): + # Get the original id from sibling + sibling_id = lib.get_id_from_sibling( + node, + history_only=cls.allow_history_only + ) + if not sibling_id: + cls.log.error("Could not find ID in siblings for '%s'", node) + continue + + lib.set_id(node, sibling_id, overwrite=True) + + @classmethod + def get_node(cls, instance): + """Get target object nodes from out_SET + + Args: + instance (str): instance + + Returns: + list: list of object nodes from out_SET + """ + return instance.data["rig_sets"].get("out_SET") + + +class ValidateSkeletonRigOutSetNodeIds(ValidateRigOutSetNodeIds): + """Validate if deformed shapes have related IDs to the original shapes + from skeleton set. + + When a deformer is applied in the scene on a referenced mesh that already + had deformers then Maya will create a new shape node for the mesh that + does not have the original id. This validator checks whether the ids are + valid on all the shape nodes in the instance. 
+ + """ + + order = ValidateContentsOrder + families = ["rig.fbx"] + hosts = ['maya'] + label = 'Skeleton Rig Out Set Node Ids' + optional = False + + @classmethod + def get_node(cls, instance): + """Get target object nodes from skeletonMesh_SET + + Args: + instance (str): instance + + Returns: + list: list of object nodes from skeletonMesh_SET + """ + return instance.data["rig_sets"].get( + "skeletonMesh_SET") diff --git a/client/ayon_maya/plugins/publish/validate_rig_output_ids.py b/client/ayon_maya/plugins/publish/validate_rig_output_ids.py new file mode 100644 index 00000000..c5fee5dd --- /dev/null +++ b/client/ayon_maya/plugins/publish/validate_rig_output_ids.py @@ -0,0 +1,161 @@ +from collections import defaultdict + +from maya import cmds + +import pyblish.api + +import ayon_maya.api.action +from ayon_maya.api.lib import get_id, set_id +from ayon_core.pipeline.publish import ( + RepairAction, + ValidateContentsOrder, + PublishValidationError +) + + +def get_basename(node): + """Return node short name without namespace""" + return node.rsplit("|", 1)[-1].rsplit(":", 1)[-1] + + +class ValidateRigOutputIds(pyblish.api.InstancePlugin): + """Validate rig output ids. + + Ids must share the same id as similarly named nodes in the scene. This is + to ensure the id from the model is preserved through animation. + + """ + order = ValidateContentsOrder + 0.05 + label = "Rig Output Ids" + hosts = ["maya"] + families = ["rig"] + actions = [RepairAction, + ayon_maya.api.action.SelectInvalidAction] + + @classmethod + def apply_settings(cls, project_settings): + # Disable plug-in if cbId workflow is disabled + if not project_settings["maya"].get("use_cbid_workflow", True): + cls.enabled = False + return + + def process(self, instance): + invalid = self.get_invalid(instance, compute=True) + if invalid: + raise PublishValidationError("Found nodes with mismatched IDs.") + + @classmethod + def get_invalid(cls, instance, compute=False): + invalid_matches = cls.get_invalid_matches(instance, compute=compute) + return list(invalid_matches.keys()) + + @classmethod + def get_invalid_matches(cls, instance, compute=False): + invalid = {} + + if compute: + out_set = cls.get_node(instance) + if not out_set: + instance.data["mismatched_output_ids"] = invalid + return invalid + + instance_nodes = cmds.sets(out_set, query=True, nodesOnly=True) + instance_nodes = cmds.ls(instance_nodes, long=True) + for node in instance_nodes: + shapes = cmds.listRelatives(node, shapes=True, fullPath=True) + if shapes: + instance_nodes.extend(shapes) + + scene_nodes = cmds.ls(type="transform", long=True) + scene_nodes += cmds.ls(type="mesh", long=True) + scene_nodes = set(scene_nodes) - set(instance_nodes) + + scene_nodes_by_basename = defaultdict(list) + for node in scene_nodes: + basename = get_basename(node) + scene_nodes_by_basename[basename].append(node) + + for instance_node in instance_nodes: + basename = get_basename(instance_node) + if basename not in scene_nodes_by_basename: + continue + + matches = scene_nodes_by_basename[basename] + + ids = set(get_id(node) for node in matches) + ids.add(get_id(instance_node)) + + if len(ids) > 1: + cls.log.error( + "\"{}\" id mismatch to: {}".format( + instance_node, matches + ) + ) + invalid[instance_node] = matches + + instance.data["mismatched_output_ids"] = invalid + else: + invalid = instance.data["mismatched_output_ids"] + + return invalid + + @classmethod + def repair(cls, instance): + invalid_matches = cls.get_invalid_matches(instance) + + multiple_ids_match = [] + for 
instance_node, matches in invalid_matches.items(): + ids = set(get_id(node) for node in matches) + + # If there are multiple scene ids matched, and error needs to be + # raised for manual correction. + if len(ids) > 1: + multiple_ids_match.append({"node": instance_node, + "matches": matches}) + continue + + id_to_set = next(iter(ids)) + set_id(instance_node, id_to_set, overwrite=True) + + if multiple_ids_match: + raise PublishValidationError( + "Multiple matched ids found. Please repair manually: " + "{}".format(multiple_ids_match) + ) + + @classmethod + def get_node(cls, instance): + """Get target object nodes from out_SET + + Args: + instance (str): instance + + Returns: + list: list of object nodes from out_SET + """ + return instance.data["rig_sets"].get("out_SET") + + +class ValidateSkeletonRigOutputIds(ValidateRigOutputIds): + """Validate rig output ids from the skeleton sets. + + Ids must share the same id as similarly named nodes in the scene. This is + to ensure the id from the model is preserved through animation. + + """ + order = ValidateContentsOrder + 0.05 + label = "Skeleton Rig Output Ids" + hosts = ["maya"] + families = ["rig.fbx"] + + @classmethod + def get_node(cls, instance): + """Get target object nodes from skeletonMesh_SET + + Args: + instance (str): instance + + Returns: + list: list of object nodes from skeletonMesh_SET + """ + return instance.data["rig_sets"].get("skeletonMesh_SET") diff --git a/client/ayon_maya/plugins/publish/validate_scene_set_workspace.py b/client/ayon_maya/plugins/publish/validate_scene_set_workspace.py new file mode 100644 index 00000000..c7d5de20 --- /dev/null +++ b/client/ayon_maya/plugins/publish/validate_scene_set_workspace.py @@ -0,0 +1,51 @@ +import os + +import maya.cmds as cmds +import pyblish.api + +from ayon_core.pipeline.publish import ( + PublishValidationError, ValidatePipelineOrder) + + +def is_subdir(path, root_dir): + """ Returns whether path is a subdirectory (or file) within root_dir """ + path = os.path.realpath(path) + root_dir = os.path.realpath(root_dir) + + # If not on same drive + if os.path.splitdrive(path)[0].lower() != os.path.splitdrive(root_dir)[0].lower(): # noqa: E501 + return False + + # Get 'relative path' (can contain ../ which means going up) + relative = os.path.relpath(path, root_dir) + + # Check if the path starts by going up, if so it's not a subdirectory. :) + if relative.startswith(os.pardir) or relative == os.curdir: + return False + else: + return True + + +class ValidateSceneSetWorkspace(pyblish.api.ContextPlugin): + """Validate the scene is inside the currently set Maya workspace""" + + order = ValidatePipelineOrder + hosts = ['maya'] + label = 'Maya Workspace Set' + + def process(self, context): + + scene_name = cmds.file(query=True, sceneName=True) + if not scene_name: + raise PublishValidationError( + "Scene hasn't been saved. Workspace can't be validated.") + + root_dir = cmds.workspace(query=True, rootDirectory=True) + + if not is_subdir(scene_name, root_dir): + raise PublishValidationError( + "Maya workspace is not set correctly.\n\n" + f"Current workfile `{scene_name}` is not inside the " + f"current Maya project root directory `{root_dir}`.\n\n" + "Please use Workfile app to re-save." 
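+                # e.g. is_subdir("P:/proj/work/scene.ma", "P:/proj") is True,
+                # while a path on another drive or outside the root is False.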
+            )
diff --git a/client/ayon_maya/plugins/publish/validate_setdress_root.py b/client/ayon_maya/plugins/publish/validate_setdress_root.py
new file mode 100644
index 00000000..f88e33fd
--- /dev/null
+++ b/client/ayon_maya/plugins/publish/validate_setdress_root.py
@@ -0,0 +1,28 @@
+import pyblish.api
+from ayon_core.pipeline.publish import (
+    ValidateContentsOrder,
+    PublishValidationError
+)
+
+
+class ValidateSetdressRoot(pyblish.api.InstancePlugin):
+    """Validate if set dress top root node is published."""
+
+    order = ValidateContentsOrder
+    label = "SetDress Root"
+    hosts = ["maya"]
+    families = ["setdress"]
+
+    def process(self, instance):
+        from maya import cmds
+
+        if instance.data.get("exactSetMembersOnly"):
+            return
+
+        set_member = instance.data["setMembers"]
+        root = cmds.ls(set_member, assemblies=True, long=True)
+
+        if not root or root[0] not in set_member:
+            raise PublishValidationError(
+                "Setdress top root node is not being published."
+            )
diff --git a/client/ayon_maya/plugins/publish/validate_shader_name.py b/client/ayon_maya/plugins/publish/validate_shader_name.py
new file mode 100644
index 00000000..ed1ea179
--- /dev/null
+++ b/client/ayon_maya/plugins/publish/validate_shader_name.py
@@ -0,0 +1,82 @@
+import re
+
+import pyblish.api
+from maya import cmds
+
+import ayon_maya.api.action
+from ayon_core.pipeline.publish import (
+    OptionalPyblishPluginMixin, PublishValidationError, ValidateContentsOrder)
+
+
+class ValidateShaderName(pyblish.api.InstancePlugin,
+                         OptionalPyblishPluginMixin):
+    """Validate shader name assigned.
+
+    It should be <assetName>_<*>_SHD
+
+    """
+    optional = True
+    order = ValidateContentsOrder
+    families = ["look"]
+    hosts = ['maya']
+    label = 'Validate Shaders Name'
+    actions = [ayon_maya.api.action.SelectInvalidAction]
+    regex = r'(?P<asset>.*)_(.*)_SHD'
+
+    # The default connections to check
+    def process(self, instance):
+        if not self.is_active(instance.data):
+            return
+
+        invalid = self.get_invalid(instance)
+        if invalid:
+            raise PublishValidationError(
+                ("Found shapes with invalid shader names "
+                 "assigned:\n{}").format(invalid))
+
+    @classmethod
+    def get_invalid(cls, instance):
+
+        invalid = []
+
+        # Get all shapes from the instance
+        content_instance = instance.data.get("setMembers", None)
+        if not content_instance:
+            cls.log.error("Instance has no nodes!")
+            return True
+
+        descendants = cmds.listRelatives(content_instance,
+                                         allDescendents=True,
+                                         fullPath=True) or []
+
+        descendants = cmds.ls(descendants, noIntermediate=True, long=True)
+        shapes = cmds.ls(descendants, type=["nurbsSurface", "mesh"], long=True)
+        folder_path = instance.data.get("folderPath")
+
+        # Check the number of connected shadingEngines per shape
+        regex_compile = re.compile(cls.regex)
+        error_message = "object {0} has invalid shader name {1}"
+        for shape in shapes:
+            shading_engines = cmds.listConnections(shape,
+                                                   destination=True,
+                                                   type="shadingEngine") or []
+            shaders = cmds.ls(
+                cmds.listConnections(shading_engines), materials=1
+            )
+
+            for shader in shaders:
+                m = regex_compile.match(shader)
+                if m is None:
+                    invalid.append(shape)
+                    cls.log.error(error_message.format(shape, shader))
+                else:
+                    if 'asset' in regex_compile.groupindex:
+                        if m.group('asset') != folder_path:
+                            invalid.append(shape)
+                            message = error_message
+                            message += " with missing folder path \"{2}\""
+                            cls.log.error(
+                                message.format(shape, shader, folder_path)
+                            )
+
+        return invalid
diff --git a/client/ayon_maya/plugins/publish/validate_shape_default_names.py
b/client/ayon_maya/plugins/publish/validate_shape_default_names.py new file mode 100644 index 00000000..5355cd9c --- /dev/null +++ b/client/ayon_maya/plugins/publish/validate_shape_default_names.py @@ -0,0 +1,96 @@ +import re + +from maya import cmds + +import pyblish.api + +import ayon_maya.api.action +from ayon_core.pipeline.publish import ( + ValidateContentsOrder, + RepairAction, + OptionalPyblishPluginMixin, + PublishValidationError +) + + +def short_name(node): + return node.rsplit("|", 1)[-1].rsplit(":", 1)[-1] + + +class ValidateShapeDefaultNames(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): + """Validates that Shape names are using Maya's default format. + + When you create a new polygon cube Maya will name the transform + and shape respectively: + - ['pCube1', 'pCubeShape1'] + If you rename it to `bar1` it will become: + - ['bar1', 'barShape1'] + Then if you rename it to `bar` it will become: + - ['bar', 'barShape'] + Rename it again to `bar1` it will differ as opposed to before: + - ['bar1', 'bar1Shape'] + Note that bar1Shape != barShape1 + Thus the suffix number can be either in front of Shape or behind it. + Then it becomes harder to define where what number should be when a + node contains multiple shapes, for example with many controls in + rigs existing of multiple curves. + + """ + + order = ValidateContentsOrder + hosts = ['maya'] + families = ['model'] + optional = True + label = "Shape Default Naming" + actions = [ayon_maya.api.action.SelectInvalidAction, + RepairAction] + + @staticmethod + def _define_default_name(shape): + parent = cmds.listRelatives(shape, parent=True, fullPath=True)[0] + transform = short_name(parent) + return '{0}Shape'.format(transform) + + @staticmethod + def _is_valid(shape): + """ Return whether the shape's name is similar to Maya's default. """ + transform = cmds.listRelatives(shape, parent=True, fullPath=True)[0] + + transform_name = short_name(transform) + shape_name = short_name(shape) + + # A Shape's name can be either {transform}{numSuffix} + # Shape or {transform}Shape{numSuffix} + # Upon renaming nodes in Maya that is + # the pattern Maya will act towards. 
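+        # For example, a transform named "bar1" accepts both "barShape1"
+        # and "bar1Shape" (and plain "bar" accepts "barShape"), which is
+        # why trailing digits are stripped before building the pattern.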
+ transform_no_num = transform_name.rstrip("0123456789") + pattern = '^{transform}[0-9]*Shape[0-9]*$'.format( + transform=transform_no_num) + + if re.match(pattern, shape_name): + return True + else: + return False + + @classmethod + def get_invalid(cls, instance): + shapes = cmds.ls(instance, shapes=True, long=True) + return [shape for shape in shapes if not cls._is_valid(shape)] + + def process(self, instance): + """Process all the shape nodes in the instance""" + if not self.is_active(instance.data): + return + + invalid = self.get_invalid(instance) + if invalid: + raise PublishValidationError( + "Incorrectly named shapes found: {0}".format(invalid)) + + @classmethod + def repair(cls, instance): + """Process all the shape nodes in the instance""" + for shape in cls.get_invalid(instance): + correct_shape_name = cls._define_default_name(shape) + cmds.rename(shape, correct_shape_name) diff --git a/client/ayon_maya/plugins/publish/validate_shape_render_stats.py b/client/ayon_maya/plugins/publish/validate_shape_render_stats.py new file mode 100644 index 00000000..e6e2f963 --- /dev/null +++ b/client/ayon_maya/plugins/publish/validate_shape_render_stats.py @@ -0,0 +1,88 @@ +import pyblish.api + +from maya import cmds + +import ayon_maya.api.action +from ayon_core.pipeline.publish import ( + RepairAction, + ValidateMeshOrder, + PublishValidationError, + OptionalPyblishPluginMixin +) + + +class ValidateShapeRenderStats(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): + """Ensure all render stats are set to the default values.""" + + order = ValidateMeshOrder + hosts = ['maya'] + families = ['model'] + label = 'Shape Default Render Stats' + actions = [ayon_maya.api.action.SelectInvalidAction, + RepairAction] + + defaults = {'castsShadows': 1, + 'receiveShadows': 1, + 'motionBlur': 1, + 'primaryVisibility': 1, + 'smoothShading': 1, + 'visibleInReflections': 1, + 'visibleInRefractions': 1, + 'doubleSided': 1, + 'opposite': 0} + + @classmethod + def get_invalid(cls, instance): + # It seems the "surfaceShape" and those derived from it have + # `renderStat` attributes. + shapes = cmds.ls(instance, long=True, type='surfaceShape') + invalid = set() + for shape in shapes: + for attr, default_value in cls.defaults.items(): + if cmds.attributeQuery(attr, node=shape, exists=True): + value = cmds.getAttr('{}.{}'.format(shape, attr)) + if value != default_value: + invalid.add(shape) + + return invalid + + def process(self, instance): + if not self.is_active(instance.data): + return + invalid = self.get_invalid(instance) + if not invalid: + return + + defaults_str = "\n".join( + "- {}: {}\n".format(key, value) + for key, value in self.defaults.items() + ) + description = ( + "## Shape Default Render Stats\n" + "Shapes are detected with non-default render stats.\n\n" + "To ensure a model's shapes behave like a shape would by default " + "we require the render stats to have not been altered in " + "the published models.\n\n" + "### How to repair?\n" + "You can reset the default values on the shapes by using the " + "repair action." 
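+            # (The "##" heading and blank-line paragraphs above suggest this
+            # description is rendered as markdown by the publisher report.)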
+ ) + + raise PublishValidationError( + "Shapes with non-default renderStats " + "found: {0}".format(", ".join(sorted(invalid))), + description=description, + detail="The expected default values " + "are:\n\n{}".format(defaults_str) + ) + + @classmethod + def repair(cls, instance): + for shape in cls.get_invalid(instance): + for attr, default_value in cls.defaults.items(): + if cmds.attributeQuery(attr, node=shape, exists=True): + plug = '{0}.{1}'.format(shape, attr) + value = cmds.getAttr(plug) + if value != default_value: + cmds.setAttr(plug, default_value) diff --git a/client/ayon_maya/plugins/publish/validate_shape_zero.py b/client/ayon_maya/plugins/publish/validate_shape_zero.py new file mode 100644 index 00000000..6d39e264 --- /dev/null +++ b/client/ayon_maya/plugins/publish/validate_shape_zero.py @@ -0,0 +1,102 @@ +from maya import cmds + +import pyblish.api + +import ayon_maya.api.action +from ayon_maya.api import lib +from ayon_core.pipeline.publish import ( + ValidateContentsOrder, + RepairAction, + PublishValidationError, + OptionalPyblishPluginMixin +) + + +class ValidateShapeZero(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): + """Shape components may not have any "tweak" values + + To solve this issue, try freezing the shapes. + + """ + + order = ValidateContentsOrder + hosts = ["maya"] + families = ["model"] + label = "Shape Zero (Freeze)" + actions = [ + ayon_maya.api.action.SelectInvalidAction, + RepairAction + ] + optional = True + + @staticmethod + def get_invalid(instance): + """Returns the invalid shapes in the instance. + + This is the same as checking: + - all(pnt == [0,0,0] for pnt in shape.pnts[:]) + + Returns: + list: Shape with non freezed vertex + + """ + + shapes = cmds.ls(instance, type="shape") + + invalid = [] + for shape in shapes: + if cmds.polyCollapseTweaks(shape, q=True, hasVertexTweaks=True): + invalid.append(shape) + + return invalid + + @classmethod + def repair(cls, instance): + invalid_shapes = cls.get_invalid(instance) + if not invalid_shapes: + return + + with lib.maintained_selection(): + with lib.tool("selectSuperContext"): + for shape in invalid_shapes: + cmds.polyCollapseTweaks(shape) + # cmds.polyCollapseTweaks keeps selecting the geometry + # after each command. When running on many meshes + # after one another this tends to get really heavy + cmds.select(clear=True) + + def process(self, instance): + """Process all the nodes in the instance "objectSet""" + if not self.is_active(instance.data): + return + + invalid = self.get_invalid(instance) + if invalid: + raise PublishValidationError( + title="Shape Component Tweaks", + message="Shapes found with non-zero component tweaks: '{}'" + "".format(", ".join(invalid)), + description=( + "## Shapes found with component tweaks\n" + "Shapes were detected that have component tweaks on their " + "components. Please remove the component tweaks to " + "continue.\n\n" + "### Repair\n" + "The repair action will try to *freeze* the component " + "tweaks into the shapes, which is usually the correct fix " + "if the mesh has no construction history (= has its " + "history deleted)."), + detail=( + "Maya allows to store component tweaks within shape nodes " + "which are applied between its `inMesh` and `outMesh` " + "connections resulting in the output of a shape node " + "differing from the input. 
We usually want to avoid this "
+                    "for published meshes (in particular for Maya scenes) as "
+                    "it can have unintended results when using these meshes "
+                    "as intermediate meshes since it applies positional "
+                    "differences without being visible edits in the node "
+                    "graph.\n\n"
+                    "These tweaks are traditionally stored in the `.pnts` "
+                    "attribute of shapes.")
+            )
diff --git a/client/ayon_maya/plugins/publish/validate_single_assembly.py b/client/ayon_maya/plugins/publish/validate_single_assembly.py
new file mode 100644
index 00000000..f5d73553
--- /dev/null
+++ b/client/ayon_maya/plugins/publish/validate_single_assembly.py
@@ -0,0 +1,43 @@
+import pyblish.api
+from ayon_core.pipeline.publish import (
+    ValidateContentsOrder,
+    PublishValidationError
+)
+
+
+class ValidateSingleAssembly(pyblish.api.InstancePlugin):
+    """Ensure the content of the instance is grouped in a single hierarchy
+
+    The instance must have a single root node containing all the content.
+    This root node *must* be a top group in the outliner.
+
+    Example outliner:
+        root_GRP
+            -- geometry_GRP
+                -- mesh_GEO
+            -- controls_GRP
+                -- control_CTL
+
+    """
+
+    order = ValidateContentsOrder
+    hosts = ['maya']
+    families = ['rig']
+    label = 'Single Assembly'
+
+    def process(self, instance):
+        from maya import cmds
+
+        assemblies = cmds.ls(instance, assemblies=True)
+
+        # ensure unique (somehow `maya.cmds.ls` doesn't manage that)
+        assemblies = set(assemblies)
+
+        if len(assemblies) == 0:
+            raise PublishValidationError(
+                "One assembly required for: %s (currently empty?)" % instance
+            )
+        elif len(assemblies) > 1:
+            raise PublishValidationError(
+                'Multiple assemblies found: %s' % assemblies
+            )
diff --git a/client/ayon_maya/plugins/publish/validate_skeletalmesh_hierarchy.py b/client/ayon_maya/plugins/publish/validate_skeletalmesh_hierarchy.py
new file mode 100644
index 00000000..172453f1
--- /dev/null
+++ b/client/ayon_maya/plugins/publish/validate_skeletalmesh_hierarchy.py
@@ -0,0 +1,42 @@
+# -*- coding: utf-8 -*-
+import pyblish.api
+
+from ayon_core.pipeline.publish import (
+    ValidateContentsOrder,
+    PublishXmlValidationError,
+    OptionalPyblishPluginMixin
+)
+
+from maya import cmds
+
+
+class ValidateSkeletalMeshHierarchy(pyblish.api.InstancePlugin,
+                                    OptionalPyblishPluginMixin):
+    """Validates that the nodes have a common root."""
+
+    order = ValidateContentsOrder
+    hosts = ["maya"]
+    families = ["skeletalMesh"]
+    label = "Skeletal Mesh Top Node"
+    optional = False
+
+    def process(self, instance):
+        if not self.is_active(instance.data):
+            return
+        geo = instance.data.get("geometry")
+        joints = instance.data.get("joints")
+
+        joints_parents = cmds.ls(joints, long=True)
+        geo_parents = cmds.ls(geo, long=True)
+
+        parents_set = {
+            parent.split("|")[1] for parent in (joints_parents + geo_parents)
+        }
+
+        self.log.debug(parents_set)
+
+        if len(parents_set) > 2:
+            raise PublishXmlValidationError(
+                self,
+                "Multiple roots on geometry or joints."
+            )
diff --git a/client/ayon_maya/plugins/publish/validate_skeletalmesh_triangulated.py b/client/ayon_maya/plugins/publish/validate_skeletalmesh_triangulated.py
new file mode 100644
index 00000000..7e63ed2a
--- /dev/null
+++ b/client/ayon_maya/plugins/publish/validate_skeletalmesh_triangulated.py
@@ -0,0 +1,62 @@
+# -*- coding: utf-8 -*-
+import pyblish.api
+
+from ayon_maya.api.action import (
+    SelectInvalidAction,
+)
+from ayon_core.pipeline.publish import (
+    RepairAction,
+    ValidateContentsOrder,
+    PublishValidationError
+)
+
+
+from maya import cmds
+
+
+class ValidateSkeletalMeshTriangulated(pyblish.api.InstancePlugin):
+    """Validates that the geometry has been triangulated."""
+
+    order = ValidateContentsOrder
+    hosts = ["maya"]
+    families = ["skeletalMesh"]
+    label = "Skeletal Mesh Triangulated"
+    optional = True
+    actions = [
+        SelectInvalidAction,
+        RepairAction
+    ]
+
+    def process(self, instance):
+        invalid = self.get_invalid(instance)
+        if invalid:
+            raise PublishValidationError(
+                "The following objects need to be triangulated: "
+                "{}".format(invalid))

+    @classmethod
+    def get_invalid(cls, instance):
+        geo = instance.data.get("geometry")
+
+        invalid = []
+
+        for obj in cmds.listRelatives(
+                cmds.ls(geo), allDescendents=True, fullPath=True) or []:
+            n_triangles = cmds.polyEvaluate(obj, triangle=True)
+            n_faces = cmds.polyEvaluate(obj, face=True)
+
+            if not (isinstance(n_triangles, int) and isinstance(n_faces, int)):
+                continue
+
+            # We check if the number of triangles is equal to the number of
+            # faces for each transform node.
+            # If it is, the object is triangulated.
+            if cmds.objectType(obj, i="transform") and n_triangles != n_faces:
+                invalid.append(obj)
+
+        return invalid
+
+    @classmethod
+    def repair(cls, instance):
+        for node in cls.get_invalid(instance):
+            cmds.polyTriangulate(node)
diff --git a/client/ayon_maya/plugins/publish/validate_skeleton_top_group_hierarchy.py b/client/ayon_maya/plugins/publish/validate_skeleton_top_group_hierarchy.py
new file mode 100644
index 00000000..9fbe0f44
--- /dev/null
+++ b/client/ayon_maya/plugins/publish/validate_skeleton_top_group_hierarchy.py
@@ -0,0 +1,44 @@
+# -*- coding: utf-8 -*-
+"""Plugin for validating naming conventions."""
+from maya import cmds
+
+import pyblish.api
+
+from ayon_core.pipeline.publish import (
+    ValidateContentsOrder,
+    OptionalPyblishPluginMixin,
+    PublishValidationError
+)
+
+
+class ValidateSkeletonTopGroupHierarchy(pyblish.api.InstancePlugin,
+                                        OptionalPyblishPluginMixin):
+    """Validates the top group hierarchy in the SETs.
+
+    Make sure the objects inside the SETs are always the top group of the
+    hierarchy.
+
+    """
+    order = ValidateContentsOrder + 0.05
+    label = "Skeleton Rig Top Group Hierarchy"
+    families = ["rig.fbx"]
+    optional = True
+
+    def process(self, instance):
+        if not self.is_active(instance.data):
+            return
+
+        invalid = []
+        skeleton_mesh_data = instance.data.get("skeleton_mesh", [])
+        if skeleton_mesh_data:
+            invalid = self.get_top_hierarchy(skeleton_mesh_data)
+        if invalid:
+            raise PublishValidationError(
+                "The skeletonMesh_SET includes objects that are "
+                "not at the top of the hierarchy: {}".format(invalid))
+
+    def get_top_hierarchy(self, targets):
+        targets = cmds.ls(targets, long=True)  # ensure long names
+        non_top_hierarchy_list = [
+            target for target in targets if target.count("|") > 2
+        ]
+        return non_top_hierarchy_list
diff --git a/client/ayon_maya/plugins/publish/validate_skinCluster_deformer_set.py b/client/ayon_maya/plugins/publish/validate_skinCluster_deformer_set.py
new file mode 100644
index 00000000..7a7b6a90 --- /dev/null +++ b/client/ayon_maya/plugins/publish/validate_skinCluster_deformer_set.py @@ -0,0 +1,83 @@ +from maya import cmds + +import pyblish.api + +import ayon_maya.api.action +from ayon_core.pipeline.publish import ( + ValidateContentsOrder, + OptionalPyblishPluginMixin, + PublishValidationError +) + + +class ValidateSkinclusterDeformerSet(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): + """Validate skinClusters on meshes have valid member relationships. + + In rare cases it can happen that a mesh has a skinCluster in its history + but it is *not* included in the deformer relationship history. If this is + the case then FBX will not export the skinning. + + """ + + order = ValidateContentsOrder + hosts = ['maya'] + families = ['fbx'] + label = "Skincluster Deformer Relationships" + actions = [ayon_maya.api.action.SelectInvalidAction] + optional = False + + def process(self, instance): + """Process all the transform nodes in the instance""" + if not self.is_active(instance.data): + return + invalid = self.get_invalid(instance) + + if invalid: + raise PublishValidationError( + "Invalid skinCluster relationships found on meshes: {0}" + .format(invalid) + ) + + @classmethod + def get_invalid(cls, instance): + + meshes = cmds.ls(instance, type="mesh", noIntermediate=True, long=True) + invalid = list() + + for mesh in meshes: + history = cmds.listHistory(mesh) or [] + skins = cmds.ls(history, type="skinCluster") + + # Ensure at most one skinCluster + assert len(skins) <= 1, "Cannot have more than one skinCluster" + + if skins: + skin = skins[0] + + # Ensure the mesh is also in the skinCluster set + # otherwise the skin will not be exported correctly + # by the FBX Exporter. + deformer_sets = cmds.listSets(object=mesh, type=2) + for deformer_set in deformer_sets: + used_by = cmds.listConnections(deformer_set + ".usedBy", + source=True, + destination=False) + + # Ignore those that don't seem to have a usedBy connection + if not used_by: + continue + + # We have a matching deformer set relationship + if skin in set(used_by): + break + + else: + invalid.append(mesh) + cls.log.warning( + "Mesh has skinCluster in history but is not included " + "in its deformer relationship set: " + "{0} (skinCluster: {1})".format(mesh, skin) + ) + + return invalid diff --git a/client/ayon_maya/plugins/publish/validate_step_size.py b/client/ayon_maya/plugins/publish/validate_step_size.py new file mode 100644 index 00000000..1373048c --- /dev/null +++ b/client/ayon_maya/plugins/publish/validate_step_size.py @@ -0,0 +1,50 @@ +import pyblish.api + +import ayon_maya.api.action +from ayon_core.pipeline.publish import ( + PublishValidationError, + ValidateContentsOrder, + OptionalPyblishPluginMixin +) + + +class ValidateStepSize(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): + """Validates the step size for the instance is in a valid range. + + For example the `step` size should never be lower or equal to zero. 
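A quick illustration of why those bounds exist: the step is typically used as the sampling interval over the exported frame range, so a step of zero would never advance and a negative one would walk backwards indefinitely. A plain-Python sketch, not code from the plug-in:

```python
def frame_samples(start, end, step):
    """Yield the frames sampled for a given step size."""
    if step <= 0:
        raise ValueError("step must be positive")
    frame = start
    while frame <= end:
        yield frame
        frame += step


# A half-frame step samples 1001.0, 1001.5, 1002.0
print(list(frame_samples(1001.0, 1002.0, 0.5)))
```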
+
+    """
+
+    order = ValidateContentsOrder
+    label = 'Step size'
+    families = ['camera',
+                'pointcache',
+                'animation']
+    actions = [ayon_maya.api.action.SelectInvalidAction]
+    optional = False
+    MIN = 0.01
+    MAX = 1.0
+
+    @classmethod
+    def get_invalid(cls, instance):
+
+        objset = instance.data['instance_node']
+        step = instance.data.get("step", 1.0)
+
+        if step < cls.MIN or step > cls.MAX:
+            cls.log.warning("Step size is outside of valid range: {0} "
+                            "(valid: {1} to {2})".format(step,
+                                                         cls.MIN,
+                                                         cls.MAX))
+            return objset
+
+        return []
+
+    def process(self, instance):
+        if not self.is_active(instance.data):
+            return
+        invalid = self.get_invalid(instance)
+        if invalid:
+            raise PublishValidationError(
+                "Instance found with invalid step size: {0}".format(invalid))
diff --git a/client/ayon_maya/plugins/publish/validate_transform_naming_suffix.py b/client/ayon_maya/plugins/publish/validate_transform_naming_suffix.py
new file mode 100644
index 00000000..846fc866
--- /dev/null
+++ b/client/ayon_maya/plugins/publish/validate_transform_naming_suffix.py
@@ -0,0 +1,147 @@
+# -*- coding: utf-8 -*-
+"""Plugin for validating naming conventions."""
+import json
+from maya import cmds
+
+import pyblish.api
+
+import ayon_maya.api.action
+from ayon_core.pipeline.publish import (
+    ValidateContentsOrder,
+    OptionalPyblishPluginMixin,
+    PublishValidationError
+)
+
+
+class ValidateTransformNamingSuffix(pyblish.api.InstancePlugin,
+                                    OptionalPyblishPluginMixin):
+    """Validates transform suffix based on the type of its children shapes.
+
+    Suffixes must be:
+        - mesh:
+            _GEO (regular geometry)
+            _GES (geometry to be smoothed at render)
+            _GEP (proxy geometry; usually not to be rendered)
+            _OSD (open subdiv smooth at rendertime)
+        - nurbsCurve: _CRV
+        - nurbsSurface: _NRB
+        - locator: _LOC
+        - null/group: _GRP
+    Suffixes can also be overridden by project settings.
+
+    .. warning::
+        This grabs the first child shape as a reference and doesn't use the
+        others in the check.
+
+    """
+
+    order = ValidateContentsOrder
+    hosts = ["maya"]
+    families = ["model"]
+    optional = True
+    label = "Suffix Naming Conventions"
+    actions = [ayon_maya.api.action.SelectInvalidAction]
+    SUFFIX_NAMING_TABLE = json.dumps({
+        "mesh": ["_GEO", "_GES", "_GEP", "_OSD"],
+        "nurbsCurve": ["_CRV"],
+        "nurbsSurface": ["_NRB"],
+        "locator": ["_LOC"],
+        "group": ["_GRP"]
+    })
+
+    ALLOW_IF_NOT_IN_SUFFIX_TABLE = True
+
+    @classmethod
+    def get_table_for_invalid(cls):
+        suffix_naming_table = json.loads(cls.SUFFIX_NAMING_TABLE)
+        ss = [
+            " - {}: {}".format(k, ", ".join(v))
+            for k, v in suffix_naming_table.items()
+        ]
+        return "\n".join(ss)
+
+    @staticmethod
+    def is_valid_name(
+            node_name,
+            shape_type,
+            suffix_naming_table,
+            allow_if_not_in_suffix_table
+    ):
+        """Return whether node's name is correct.
+
+        The correctness for a transform's suffix is dependent on what
+        `shape_type` it holds. E.g. a transform with a mesh might need a
+        `_GEO` suffix.
+
+        When `shape_type` is None the transform doesn't have any direct
+        children shapes.
+
+        Args:
+            node_name (str): Node name.
+            shape_type (str): Type of node.
+            suffix_naming_table (dict): Mapping dict for suffixes.
+            allow_if_not_in_suffix_table (bool): Default output.
+
+        """
+        if shape_type not in suffix_naming_table:
+            return allow_if_not_in_suffix_table
+
+        suffixes = suffix_naming_table[shape_type]
+        for suffix in suffixes:
+            if node_name.endswith(suffix):
+                return True
+        return False
+
+    @classmethod
+    def get_invalid(cls, instance):
+        """Get invalid nodes in instance.
+
+        Args:
+            instance (:class:`pyblish.api.Instance`): published instance.
+
+        """
+        transforms = cmds.ls(instance, type="transform", long=True)
+
+        invalid = []
+        suffix_naming_table = json.loads(cls.SUFFIX_NAMING_TABLE)
+        for transform in transforms:
+            shapes = cmds.listRelatives(transform,
+                                        shapes=True,
+                                        fullPath=True,
+                                        noIntermediate=True)
+
+            shape_type = cmds.nodeType(shapes[0]) if shapes else "group"
+            if not cls.is_valid_name(
+                    transform,
+                    shape_type,
+                    suffix_naming_table,
+                    cls.ALLOW_IF_NOT_IN_SUFFIX_TABLE
+            ):
+                invalid.append(transform)
+
+        return invalid
+
+    def process(self, instance):
+        """Process all the nodes in the instance.
+
+        Args:
+            instance (:class:`pyblish.api.Instance`): published instance.
+
+        """
+        if not self.is_active(instance.data):
+            return
+
+        invalid = self.get_invalid(instance)
+        if invalid:
+            valid = self.get_table_for_invalid()
+
+            names = "<br/>".join(
+                " - {}".format(node) for node in invalid
+            )
+            valid = valid.replace("\n", "<br/>")
+
+            raise PublishValidationError(
+                title="Invalid naming suffix",
+                message="Valid suffixes are:<br/>{0}<br/><br/>"
+                        "Incorrectly named geometry transforms:<br/>
{1}" + "".format(valid, names)) diff --git a/client/ayon_maya/plugins/publish/validate_transform_zero.py b/client/ayon_maya/plugins/publish/validate_transform_zero.py new file mode 100644 index 00000000..51fa7938 --- /dev/null +++ b/client/ayon_maya/plugins/publish/validate_transform_zero.py @@ -0,0 +1,93 @@ +import inspect + +from maya import cmds +import pyblish.api + +import ayon_maya.api.action +from ayon_core.pipeline.publish import ( + ValidateContentsOrder, + PublishValidationError, + OptionalPyblishPluginMixin +) + + +class ValidateTransformZero(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): + """Transforms can't have any values + + To solve this issue, try freezing the transforms. So long + as the transforms, rotation and scale values are zero, + you're all good. + + """ + + order = ValidateContentsOrder + hosts = ["maya"] + families = ["model"] + label = "Transform Zero (Freeze)" + actions = [ayon_maya.api.action.SelectInvalidAction] + + _identity = [1.0, 0.0, 0.0, 0.0, + 0.0, 1.0, 0.0, 0.0, + 0.0, 0.0, 1.0, 0.0, + 0.0, 0.0, 0.0, 1.0] + _tolerance = 1e-30 + optional = True + + @classmethod + def get_invalid(cls, instance): + """Returns the invalid transforms in the instance. + + This is the same as checking: + - translate == [0, 0, 0] and rotate == [0, 0, 0] and + scale == [1, 1, 1] and shear == [0, 0, 0] + + .. note:: + This will also catch camera transforms if those + are in the instances. + + Returns: + list: Transforms that are not identity matrix + + """ + + transforms = cmds.ls(instance, type="transform") + + invalid = [] + for transform in transforms: + if ('_LOC' in transform) or ('_loc' in transform): + continue + mat = cmds.xform(transform, q=1, matrix=True, objectSpace=True) + if not all(abs(x - y) < cls._tolerance + for x, y in zip(cls._identity, mat)): + invalid.append(transform) + + return invalid + + def process(self, instance): + """Process all the nodes in the instance "objectSet""" + if not self.is_active(instance.data): + return + invalid = self.get_invalid(instance) + if invalid: + names = "
".join( + " - {}".format(node) for node in invalid + ) + + raise PublishValidationError( + title="Transform Zero", + description=self.get_description(), + message="The model publish allows no transformations. You must" + " freeze transformations to continue.

" + "Nodes found with transform values:
" + "{0}".format(names)) + + @staticmethod + def get_description(): + return inspect.cleandoc("""### Transform can't have any values + + The model publish allows no transformations. + + You must **freeze transformations** to continue. + + """) diff --git a/client/ayon_maya/plugins/publish/validate_unique_names.py b/client/ayon_maya/plugins/publish/validate_unique_names.py new file mode 100644 index 00000000..74f22926 --- /dev/null +++ b/client/ayon_maya/plugins/publish/validate_unique_names.py @@ -0,0 +1,46 @@ +from maya import cmds + +import pyblish.api +import ayon_maya.api.action +from ayon_core.pipeline.publish import ( + ValidateContentsOrder, + OptionalPyblishPluginMixin, + PublishValidationError +) + + +class ValidateUniqueNames(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): + """transform names should be unique + + ie: using cmds.ls(someNodeName) should always return shortname + + """ + + order = ValidateContentsOrder + hosts = ["maya"] + families = ["model"] + label = "Unique transform name" + actions = [ayon_maya.api.action.SelectInvalidAction] + optional = True + + @staticmethod + def get_invalid(instance): + """Returns the invalid transforms in the instance. + + Returns: + list: Non-unique name transforms. + + """ + + return [tr for tr in cmds.ls(instance, type="transform") + if '|' in tr] + + def process(self, instance): + """Process all the nodes in the instance "objectSet""" + if not self.is_active(instance.data): + return + invalid = self.get_invalid(instance) + if invalid: + raise PublishValidationError( + "Nodes found with non-unique names:\n{0}".format(invalid)) diff --git a/client/ayon_maya/plugins/publish/validate_unreal_mesh_triangulated.py b/client/ayon_maya/plugins/publish/validate_unreal_mesh_triangulated.py new file mode 100644 index 00000000..f6207797 --- /dev/null +++ b/client/ayon_maya/plugins/publish/validate_unreal_mesh_triangulated.py @@ -0,0 +1,42 @@ +# -*- coding: utf-8 -*- + +from maya import cmds +import pyblish.api + +from ayon_core.pipeline.publish import ( + ValidateMeshOrder, + OptionalPyblishPluginMixin, + PublishValidationError +) +import ayon_maya.api.action + + +class ValidateUnrealMeshTriangulated(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): + """Validate if mesh is made of triangles for Unreal Engine""" + + order = ValidateMeshOrder + hosts = ["maya"] + families = ["staticMesh"] + label = "Mesh is Triangulated" + actions = [ayon_maya.api.action.SelectInvalidAction] + active = False + + @classmethod + def get_invalid(cls, instance): + invalid = [] + meshes = cmds.ls(instance, type="mesh", long=True) + for mesh in meshes: + faces = cmds.polyEvaluate(mesh, face=True) + tris = cmds.polyEvaluate(mesh, triangle=True) + if faces != tris: + invalid.append(mesh) + + return invalid + + def process(self, instance): + if not self.is_active(instance.data): + return + invalid = self.get_invalid(instance) + if invalid: + raise PublishValidationError("Found meshes without triangles") diff --git a/client/ayon_maya/plugins/publish/validate_unreal_staticmesh_naming.py b/client/ayon_maya/plugins/publish/validate_unreal_staticmesh_naming.py new file mode 100644 index 00000000..eff98084 --- /dev/null +++ b/client/ayon_maya/plugins/publish/validate_unreal_staticmesh_naming.py @@ -0,0 +1,150 @@ +# -*- coding: utf-8 -*- +"""Validator for correct naming of Static Meshes.""" +import re + +import pyblish.api + +import ayon_maya.api.action +from ayon_core.pipeline.publish import ( + ValidateContentsOrder, + OptionalPyblishPluginMixin, + 
PublishValidationError
+)
+
+
+class ValidateUnrealStaticMeshName(pyblish.api.InstancePlugin,
+                                   OptionalPyblishPluginMixin):
+    """Validate name of Unreal Static Mesh
+
+    Unreal's naming convention states that a static mesh should start with
+    the `SM` prefix - SM_[Name]_## (e.g. SM_cube_01). These prefixes can be
+    configured in the Settings UI. This plugin also validates other types of
+    meshes - collision meshes:
+
+    UBX_[RenderMeshName]*:
+        Boxes are created with the Box objects type in
+        Max or with the Cube polygonal primitive in Maya.
+        You cannot move the vertices around or deform it
+        in any way to make it something other than a
+        rectangular prism, or else it will not work.
+
+    UCP_[RenderMeshName]*:
+        Capsules are created with the Capsule object type.
+        The capsule does not need to have many segments
+        (8 is a good number) at all because it is
+        converted into a true capsule for collision. Like
+        boxes, you should not move the individual
+        vertices around.
+
+    USP_[RenderMeshName]*:
+        Spheres are created with the Sphere object type.
+        The sphere does not need to have many segments
+        (8 is a good number) at all because it is
+        converted into a true sphere for collision. Like
+        boxes, you should not move the individual
+        vertices around.
+
+    UCX_[RenderMeshName]*:
+        Convex objects can be any completely closed
+        convex 3D shape. For example, a box can also be
+        a convex object.
+
+    This validator also checks if collision mesh [RenderMeshName] matches one
+    of SM_[RenderMeshName].
+
+    """
+    optional = True
+    order = ValidateContentsOrder
+    hosts = ["maya"]
+    families = ["staticMesh"]
+    label = "Unreal Static Mesh Name"
+    actions = [ayon_maya.api.action.SelectInvalidAction]
+    regex_mesh = r"(?P<renderName>.*)"
+    regex_collision = r"(?P<renderName>.*)"
+
+    @classmethod
+    def get_invalid(cls, instance):
+
+        invalid = []
+
+        collision_prefixes = (
+            instance.context.data["project_settings"]
+            ["maya"]
+            ["create"]
+            ["CreateUnrealStaticMesh"]
+            ["collision_prefixes"]
+        )
+
+        if cls.validate_mesh:
+            # compile regex for testing names
+            regex_mesh = "{}{}".format(
+                ("_" + cls.static_mesh_prefix) or "", cls.regex_mesh
+            )
+            sm_r = re.compile(regex_mesh)
+            if not sm_r.match(instance.data.get("productName")):
+                cls.log.error("Mesh doesn't comply with name validation.")
+                return True
+
+        if cls.validate_collision:
+            collision_set = instance.data.get("collisionMembers", None)
+            # soft-fail if there are no collision objects
+            if not collision_set:
+                cls.log.warning("No collision objects to validate.")
+                return False
+
+            regex_collision = "{}{}_(\\d+)".format(
+                "(?P<prefix>({}))_".format(
+                    "|".join("{0}".format(p) for p in collision_prefixes)
+                ) or "", cls.regex_collision
+            )
+
+            cl_r = re.compile(regex_collision)
+
+            folder_name = instance.data["folderEntity"]["name"]
+            mesh_name = "{}{}".format(folder_name,
+                                      instance.data.get("variant", ""))
+
+            for obj in collision_set:
+                cl_m = cl_r.match(obj)
+                if not cl_m:
+                    cls.log.error("{} is invalid".format(obj))
+                    invalid.append(obj)
+                else:
+                    expected_collision = "{}_{}".format(
+                        cl_m.group("prefix"),
+                        mesh_name
+                    )
+
+                    if not obj.startswith(expected_collision):
+
+                        cls.log.error(
+                            "Collision object name doesn't match "
+                            "static mesh name"
+                        )
+                        cls.log.error("{}_{} != {}_{}*".format(
+                            cl_m.group("prefix"),
+                            cl_m.group("renderName"),
+                            cl_m.group("prefix"),
+                            mesh_name,
+                        ))
+                        invalid.append(obj)
+
+        return invalid
+
+    def process(self, instance):
+        if not self.is_active(instance.data):
+            return
+
+        if not self.validate_mesh and not self.validate_collision:
+            self.log.debug("Validation 
of both mesh and collision names" + "is disabled.") + return + + if not instance.data.get("collisionMembers", None): + self.log.debug("There are no collision objects to validate") + return + + invalid = self.get_invalid(instance) + + if invalid: + raise PublishValidationError("Model naming is invalid. See log.") diff --git a/client/ayon_maya/plugins/publish/validate_unreal_up_axis.py b/client/ayon_maya/plugins/publish/validate_unreal_up_axis.py new file mode 100644 index 00000000..f7acd41c --- /dev/null +++ b/client/ayon_maya/plugins/publish/validate_unreal_up_axis.py @@ -0,0 +1,37 @@ +# -*- coding: utf-8 -*- + +from maya import cmds +import pyblish.api + +from ayon_core.pipeline.publish import ( + ValidateContentsOrder, + RepairAction, + OptionalPyblishPluginMixin, + PublishValidationError +) + + +class ValidateUnrealUpAxis(pyblish.api.ContextPlugin, + OptionalPyblishPluginMixin): + """Validate if Z is set as up axis in Maya""" + + optional = True + active = False + order = ValidateContentsOrder + hosts = ["maya"] + families = ["staticMesh"] + label = "Unreal Up-Axis check" + actions = [RepairAction] + + def process(self, context): + if not self.is_active(context.data): + return + + if cmds.upAxis(q=True, axis=True) != "z": + raise PublishValidationError( + "Invalid axis set as up axis" + ) + + @classmethod + def repair(cls, instance): + cmds.upAxis(axis="z", rotateView=True) diff --git a/client/ayon_maya/plugins/publish/validate_visible_only.py b/client/ayon_maya/plugins/publish/validate_visible_only.py new file mode 100644 index 00000000..42525853 --- /dev/null +++ b/client/ayon_maya/plugins/publish/validate_visible_only.py @@ -0,0 +1,59 @@ +import pyblish.api + +from ayon_maya.api.lib import iter_visible_nodes_in_range +import ayon_maya.api.action +from ayon_core.pipeline.publish import ( + ValidateContentsOrder, + PublishValidationError, + OptionalPyblishPluginMixin +) + + +class ValidateAlembicVisibleOnly(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): + """Validates at least a single node is visible in frame range. + + This validation only validates if the `visibleOnly` flag is enabled + on the instance - otherwise the validation is skipped. + + """ + order = ValidateContentsOrder + 0.05 + label = "Alembic Visible Only" + hosts = ["maya"] + families = ["pointcache", "animation"] + actions = [ayon_maya.api.action.SelectInvalidAction] + optional = False + + def process(self, instance): + if not self.is_active(instance.data): + return + if not instance.data.get("visibleOnly", False): + self.log.debug("Visible only is disabled. Validation skipped..") + return + + invalid = self.get_invalid(instance) + if invalid: + start, end = self.get_frame_range(instance) + raise PublishValidationError( + f"No visible nodes found in frame range {start}-{end}." 
+ ) + + @classmethod + def get_invalid(cls, instance): + + if instance.data["productType"] == "animation": + # Special behavior to use the nodes in out_SET + nodes = instance.data["out_hierarchy"] + else: + nodes = instance[:] + + start, end = cls.get_frame_range(instance) + if not any(iter_visible_nodes_in_range(nodes, start, end)): + # Return the nodes we have considered so the user can identify + # them with the select invalid action + return nodes + + @staticmethod + def get_frame_range(instance): + data = instance.data + return data["frameStartHandle"], data["frameEndHandle"] diff --git a/client/ayon_maya/plugins/publish/validate_vray.py b/client/ayon_maya/plugins/publish/validate_vray.py new file mode 100644 index 00000000..db782126 --- /dev/null +++ b/client/ayon_maya/plugins/publish/validate_vray.py @@ -0,0 +1,18 @@ +from maya import cmds + +import pyblish.api +from ayon_core.pipeline.publish import PublishValidationError + + +class ValidateVray(pyblish.api.InstancePlugin): + """Validate general Vray setup.""" + + order = pyblish.api.ValidatorOrder + label = 'VRay' + hosts = ["maya"] + families = ["vrayproxy"] + + def process(self, instance): + # Validate vray plugin is loaded. + if not cmds.pluginInfo("vrayformaya", query=True, loaded=True): + raise PublishValidationError("Vray plugin is not loaded.") diff --git a/client/ayon_maya/plugins/publish/validate_vray_distributed_rendering.py b/client/ayon_maya/plugins/publish/validate_vray_distributed_rendering.py new file mode 100644 index 00000000..76c87f3e --- /dev/null +++ b/client/ayon_maya/plugins/publish/validate_vray_distributed_rendering.py @@ -0,0 +1,68 @@ +import pyblish.api +from maya import cmds + +from ayon_maya.api import lib +from ayon_core.pipeline.publish import ( + KnownPublishError, + PublishValidationError, + RepairAction, + ValidateContentsOrder, + OptionalPyblishPluginMixin +) + + +class ValidateVRayDistributedRendering(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): + """Validate V-Ray Distributed Rendering is ignored in batch mode. + + Whenever Distributed Rendering is enabled for V-Ray in the render settings + ensure that the "Ignore in batch mode" is enabled so the submitted job + won't try to render each frame with all machines resulting in faulty + errors. + + """ + + order = ValidateContentsOrder + label = "VRay Distributed Rendering" + families = ["renderlayer"] + actions = [RepairAction] + optional = False + + # V-Ray attribute names + enabled_attr = "vraySettings.sys_distributed_rendering_on" + ignored_attr = "vraySettings.sys_distributed_rendering_ignore_batch" + + def process(self, instance): + if not self.is_active(instance.data): + return + if instance.data.get("renderer") != "vray": + # If not V-Ray, ignore + return + + vray_settings = cmds.ls("vraySettings", type="VRaySettingsNode") + if not vray_settings: + raise KnownPublishError( + "Please ensure a VRay Settings Node is present" + ) + + renderlayer = instance.data['renderlayer'] + + if not lib.get_attr_in_layer(self.enabled_attr, layer=renderlayer): + # If not distributed rendering enabled, ignore.. 
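+            # (lib.get_attr_in_layer is assumed to resolve the attribute
+            # value with the render layer's overrides applied, which is why
+            # it is used here instead of a plain cmds.getAttr.)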
+ return + + # If distributed rendering is enabled but it is *not* set to ignore + # during batch mode we invalidate the instance + if not lib.get_attr_in_layer(self.ignored_attr, layer=renderlayer): + raise PublishValidationError( + "Renderlayer has distributed rendering enabled " + "but is not set to ignore in batch mode.") + + @classmethod + def repair(cls, instance): + + renderlayer = instance.data.get("renderlayer") + with lib.renderlayer(renderlayer): + cls.log.debug("Enabling Distributed Rendering " + "ignore in batch mode..") + cmds.setAttr(cls.ignored_attr, True) diff --git a/client/ayon_maya/plugins/publish/validate_vray_referenced_aovs.py b/client/ayon_maya/plugins/publish/validate_vray_referenced_aovs.py new file mode 100644 index 00000000..9df5fb84 --- /dev/null +++ b/client/ayon_maya/plugins/publish/validate_vray_referenced_aovs.py @@ -0,0 +1,98 @@ +# -*- coding: utf-8 -*- +"""Validate if there are AOVs pulled from references.""" +import pyblish.api +import types +from maya import cmds + +from ayon_core.pipeline.publish import ( + RepairContextAction, + OptionalPyblishPluginMixin, + PublishValidationError +) + + +class ValidateVrayReferencedAOVs(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): + """Validate whether the V-Ray Render Elements (AOVs) include references. + + This will check if there are AOVs pulled from references. If + `Vray Use Referenced Aovs` is checked on render instance, u must add those + manually to Render Elements as Pype will expect them to be rendered. + + """ + + order = pyblish.api.ValidatorOrder + label = 'VRay Referenced AOVs' + hosts = ['maya'] + families = ['renderlayer'] + actions = [RepairContextAction] + optional = False + + def process(self, instance): + """Plugin main entry point.""" + if not self.is_active(instance.data): + return + if instance.data.get("renderer") != "vray": + # If not V-Ray ignore.. + return + + ref_aovs = cmds.ls( + type=["VRayRenderElement", "VRayRenderElementSet"], + referencedNodes=True) + ref_aovs_enabled = ValidateVrayReferencedAOVs.maya_is_true( + cmds.getAttr("vraySettings.relements_usereferenced")) + + if not instance.data.get("vrayUseReferencedAovs"): + if ref_aovs_enabled and ref_aovs: + self.log.warning(( + "Referenced AOVs are enabled in Vray " + "Render Settings and are detected in scene, but " + "AYON render instance option for referenced AOVs is " + "disabled. Those AOVs will be rendered but not published " + "by Pype." + )) + self.log.warning(", ".join(ref_aovs)) + else: + if not ref_aovs: + self.log.warning(( + "Use of referenced AOVs enabled but there are none " + "in the scene." + )) + if not ref_aovs_enabled: + self.log.error(( + "'Use referenced' not enabled in Vray Render Settings." + )) + raise PublishValidationError("Invalid render settings") + + @classmethod + def repair(cls, context): + """Repair action.""" + vray_settings = cmds.ls(type="VRaySettingsNode") + if not vray_settings: + node = cmds.createNode("VRaySettingsNode") + else: + node = vray_settings[0] + + cmds.setAttr("{}.relements_usereferenced".format(node), True) + + @staticmethod + def maya_is_true(attr_val): + """Whether a Maya attr evaluates to True. + + When querying an attribute value from an ambiguous object the + Maya API will return a list of values, which need to be properly + handled to evaluate properly. + + Args: + attr_val (mixed): Maya attribute to be evaluated as bool. + + Returns: + bool: cast Maya attribute to Pythons boolean value. 
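+        Example:
+            >>> ValidateVrayReferencedAOVs.maya_is_true(True)
+            True
+            >>> ValidateVrayReferencedAOVs.maya_is_true([0, 1])
+            True
+            >>> ValidateVrayReferencedAOVs.maya_is_true(0)
+            False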
+ + """ + if isinstance(attr_val, bool): + return attr_val + elif isinstance(attr_val, (list, types.GeneratorType)): + return any(attr_val) + else: + return bool(attr_val) diff --git a/client/ayon_maya/plugins/publish/validate_vray_translator_settings.py b/client/ayon_maya/plugins/publish/validate_vray_translator_settings.py new file mode 100644 index 00000000..a3d93dd9 --- /dev/null +++ b/client/ayon_maya/plugins/publish/validate_vray_translator_settings.py @@ -0,0 +1,105 @@ +# -*- coding: utf-8 -*- +"""Validate VRay Translator settings.""" +import pyblish.api +from ayon_core.pipeline.publish import ( + context_plugin_should_run, + RepairContextAction, + ValidateContentsOrder, + PublishValidationError, + OptionalPyblishPluginMixin +) + +from maya import cmds + + +class ValidateVRayTranslatorEnabled(pyblish.api.ContextPlugin, + OptionalPyblishPluginMixin): + """Validate VRay Translator settings for extracting vrscenes.""" + + order = ValidateContentsOrder + label = "VRay Translator Settings" + families = ["vrayscene_layer"] + actions = [RepairContextAction] + optional = False + + def process(self, context): + """Plugin entry point.""" + if not self.is_active(context.data): + return + # Workaround bug pyblish-base#250 + if not context_plugin_should_run(self, context): + return + + invalid = self.get_invalid(context) + if invalid: + raise PublishValidationError( + message="Found invalid VRay Translator settings", + title=self.label + ) + + @classmethod + def get_invalid(cls, context): + """Get invalid instances.""" + invalid = False + + # Get vraySettings node + vray_settings = cmds.ls(type="VRaySettingsNode") + if not vray_settings: + raise PublishValidationError( + "Please ensure a VRay Settings Node is present", + title=cls.label + ) + + node = vray_settings[0] + + if cmds.setAttr("{}.vrscene_render_on".format(node)): + cls.log.error( + "Render is enabled, for export it should be disabled") + invalid = True + + if not cmds.getAttr("{}.vrscene_on".format(node)): + cls.log.error("Export vrscene not enabled") + invalid = True + + for instance in context: + if "vrayscene_layer" not in instance.data.get("families"): + continue + + if instance.data.get("vraySceneMultipleFiles"): + if not cmds.getAttr("{}.misc_eachFrameInFile".format(node)): + cls.log.error("Each Frame in File not enabled") + invalid = True + else: + if cmds.getAttr("{}.misc_eachFrameInFile".format(node)): + cls.log.error("Each Frame in File is enabled") + invalid = True + + vrscene_filename = cmds.getAttr("{}.vrscene_filename".format(node)) + if vrscene_filename != "vrayscene///": + cls.log.error("Template for file name is wrong") + invalid = True + + return invalid + + @classmethod + def repair(cls, context): + """Repair invalid settings.""" + vray_settings = cmds.ls(type="VRaySettingsNode") + if not vray_settings: + node = cmds.createNode("VRaySettingsNode") + else: + node = vray_settings[0] + + cmds.setAttr("{}.vrscene_render_on".format(node), False) + cmds.setAttr("{}.vrscene_on".format(node), True) + for instance in context: + if "vrayscene" not in instance.data.get("families"): + continue + + if instance.data.get("vraySceneMultipleFiles"): + cmds.setAttr("{}.misc_eachFrameInFile".format(node), True) + else: + cmds.setAttr("{}.misc_eachFrameInFile".format(node), False) + cmds.setAttr("{}.vrscene_filename".format(node), + "vrayscene///", + type="string") diff --git a/client/ayon_maya/plugins/publish/validate_vrayproxy.py b/client/ayon_maya/plugins/publish/validate_vrayproxy.py new file mode 100644 index 00000000..0288d4b8 
--- /dev/null +++ b/client/ayon_maya/plugins/publish/validate_vrayproxy.py @@ -0,0 +1,37 @@ +import pyblish.api + +from ayon_core.pipeline.publish import ( + OptionalPyblishPluginMixin, + PublishValidationError +) + + +class ValidateVrayProxy(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): + + order = pyblish.api.ValidatorOrder + label = "VRay Proxy Settings" + hosts = ["maya"] + families = ["vrayproxy"] + optional = False + + def process(self, instance): + data = instance.data + if not self.is_active(data): + return + if not data["setMembers"]: + raise PublishValidationError( + f"Instance '{instance.name}' is empty." + ) + + if data["animation"]: + if data["frameEnd"] < data["frameStart"]: + raise PublishValidationError( + "End frame is smaller than start frame" + ) + + if not data["vrmesh"] and not data["alembic"]: + raise PublishValidationError( + "Both vrmesh and alembic are off. Needs at least one to" + " publish." + ) diff --git a/client/ayon_maya/plugins/publish/validate_vrayproxy_members.py b/client/ayon_maya/plugins/publish/validate_vrayproxy_members.py new file mode 100644 index 00000000..40315a1e --- /dev/null +++ b/client/ayon_maya/plugins/publish/validate_vrayproxy_members.py @@ -0,0 +1,45 @@ +import pyblish.api + +from maya import cmds + +import ayon_maya.api.action +from ayon_core.pipeline.publish import ( + PublishValidationError, + OptionalPyblishPluginMixin +) + + + +class ValidateVrayProxyMembers(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): + """Validate whether the V-Ray Proxy instance has shape members""" + + order = pyblish.api.ValidatorOrder + label = 'VRay Proxy Members' + hosts = ['maya'] + families = ['vrayproxy'] + actions = [ayon_maya.api.action.SelectInvalidAction] + optional = False + + def process(self, instance): + if not self.is_active(instance.data): + return + invalid = self.get_invalid(instance) + + if invalid: + raise PublishValidationError("'%s' is invalid VRay Proxy for " + "export!" % instance.name) + + @classmethod + def get_invalid(cls, instance): + + shapes = cmds.ls(instance, + shapes=True, + noIntermediate=True, + long=True) + + if not shapes: + cls.log.error("'%s' contains no shapes." % instance.name) + + # Return the instance itself + return [instance.name] diff --git a/client/ayon_maya/plugins/publish/validate_xgen.py b/client/ayon_maya/plugins/publish/validate_xgen.py new file mode 100644 index 00000000..7e0f01c4 --- /dev/null +++ b/client/ayon_maya/plugins/publish/validate_xgen.py @@ -0,0 +1,70 @@ +import json + +import maya.cmds as cmds +import xgenm + +import pyblish.api +from ayon_core.pipeline.publish import PublishValidationError + + +class ValidateXgen(pyblish.api.InstancePlugin): + """Validate Xgen data.""" + + label = "Validate Xgen" + order = pyblish.api.ValidatorOrder + host = ["maya"] + families = ["xgen"] + + def process(self, instance): + set_members = instance.data.get("setMembers") + + # Only 1 collection/node per instance. + if len(set_members) != 1: + raise PublishValidationError( + "Only one collection per instance is allowed." + " Found:\n{}".format(set_members) + ) + + # Only xgen palette node is allowed. + node_type = cmds.nodeType(set_members[0]) + if node_type != "xgmPalette": + raise PublishValidationError( + "Only node of type \"xgmPalette\" are allowed. Referred to as" + " \"collection\" in the Maya UI." + " Node type found: {}".format(node_type) + ) + + # Can't have inactive modifiers in collection cause Xgen will try and + # look for them when loading. 
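+        # ("Inactive" below means the fxModule's `active` attribute holds
+        # the string "false"; xgenm reports attribute values as strings.)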
diff --git a/client/ayon_maya/plugins/publish/validate_xgen.py b/client/ayon_maya/plugins/publish/validate_xgen.py new file mode 100644 index 00000000..7e0f01c4 --- /dev/null +++ b/client/ayon_maya/plugins/publish/validate_xgen.py @@ -0,0 +1,70 @@ +import json + +import maya.cmds as cmds +import xgenm + +import pyblish.api +from ayon_core.pipeline.publish import PublishValidationError + + +class ValidateXgen(pyblish.api.InstancePlugin): + """Validate Xgen data.""" + + label = "Validate Xgen" + order = pyblish.api.ValidatorOrder + hosts = ["maya"] + families = ["xgen"] + + def process(self, instance): + set_members = instance.data.get("setMembers") + + # Only 1 collection/node per instance. + if len(set_members) != 1: + raise PublishValidationError( + "Only one collection per instance is allowed." + " Found:\n{}".format(set_members) + ) + + # Only xgen palette node is allowed. + node_type = cmds.nodeType(set_members[0]) + if node_type != "xgmPalette": + raise PublishValidationError( + "Only nodes of type \"xgmPalette\" are allowed. Referred to as" + " \"collection\" in the Maya UI." + " Node type found: {}".format(node_type) + ) + + # Can't have inactive modifiers in the collection because Xgen will + # try to look for them when loading. + palette = instance.data["xgmPalette"].replace("|", "") + inactive_modifiers = {} + for description in instance.data["xgmDescriptions"]: + description = description.split("|")[-2] + modifier_names = xgenm.fxModules(palette, description) + for name in modifier_names: + attr = xgenm.getAttr("active", palette, description, name) + # Attribute values are lowercase strings of false/true. + if attr == "false": + try: + inactive_modifiers[description].append(name) + except KeyError: + inactive_modifiers[description] = [name] + + if inactive_modifiers: + raise PublishValidationError( + "There are inactive modifiers on the collection. " + "Please delete these:\n{}".format( + json.dumps(inactive_modifiers, indent=4, sort_keys=True) + ) + ) + + # We need a namespace else there will be a naming conflict when + # extracting because of stripping namespaces and parenting to world. + node_names = [instance.data["xgmPalette"]] + node_names.extend(instance.data["xgenConnections"]) + non_namespaced_nodes = [n for n in node_names if ":" not in n] + if non_namespaced_nodes: + raise PublishValidationError( + "Could not find namespace on {}. Namespace is required for" + " xgen publishing.".format(non_namespaced_nodes) + ) diff --git a/client/ayon_maya/plugins/publish/validate_yeti_renderscript_callbacks.py b/client/ayon_maya/plugins/publish/validate_yeti_renderscript_callbacks.py new file mode 100644 index 00000000..086cb7b1
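# Reviewer note (not part of the patch): the inactive-modifier collection in
# validate_xgen.py above can also be written with dict.setdefault; a sketch,
# assuming the XGen plug-in is loaded and `palette`/`descriptions` name
# existing XGen nodes.
import xgenm

def get_inactive_modifiers(palette, descriptions):
    inactive = {}
    for description in descriptions:
        for name in xgenm.fxModules(palette, description):
            # XGen stores booleans as the lowercase strings "true"/"false".
            if xgenm.getAttr("active", palette, description, name) == "false":
                inactive.setdefault(description, []).append(name)
    return inactive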
--- /dev/null +++ b/client/ayon_maya/plugins/publish/validate_yeti_renderscript_callbacks.py @@ -0,0 +1,124 @@ +from maya import cmds + +import pyblish.api +from ayon_core.pipeline.publish import ( + ValidateContentsOrder, + PublishValidationError, + OptionalPyblishPluginMixin +) + + +class ValidateYetiRenderScriptCallbacks(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): + """Check if the render script callbacks will be used during the rendering + + In order to ensure the render tasks are executed properly we need to check + if the pre and post render callbacks are actually used. + + For example: + Yeti is not loaded but its callback scripts are still set in the + render settings. This will cause an error because Maya tries to find + and execute the callbacks. + + Developer note: + The pre and post render callbacks cannot be overridden + + """ + + order = ValidateContentsOrder + label = "Yeti Render Script Callbacks" + hosts = ["maya"] + families = ["renderlayer"] + optional = False + + # Settings per renderer + callbacks = { + "vray": { + "pre": "catch(`pgYetiVRayPreRender`)", + "post": "catch(`pgYetiVRayPostRender`)" + }, + "arnold": { + "pre": "pgYetiPreRender" + } + } + + def process(self, instance): + if not self.is_active(instance.data): + return + invalid = self.get_invalid(instance) + if invalid: + raise PublishValidationError( + f"Invalid render callbacks found for '{instance.name}'.") + + @classmethod + def get_invalid(cls, instance): + + yeti_loaded = cmds.pluginInfo("pgYetiMaya", query=True, loaded=True) + + if yeti_loaded and not cmds.ls(type="pgYetiMaya"): + # The yeti plug-in is available and loaded so at + # this point we don't really care whether the scene + # has any yeti callback set or not since if the callback + # is there it wouldn't error and if it weren't then + # nothing happens because there are no yeti nodes. + cls.log.debug( + "Yeti is loaded but no yeti nodes were found. " + "Callback validation skipped.." + ) + return False + + renderer = instance.data["renderer"] + if renderer == "redshift": + cls.log.debug("Redshift ignores any pre and post render callbacks") + return False + + callback_lookup = cls.callbacks.get(renderer, {}) + if not callback_lookup: + cls.log.warning("Renderer '%s' is not supported in this plugin" + % renderer) + return False + + pre_mel = cmds.getAttr("defaultRenderGlobals.preMel") or "" + post_mel = cmds.getAttr("defaultRenderGlobals.postMel") or "" + + if pre_mel.strip(): + cls.log.debug("Found pre mel: `%s`" % pre_mel) + + if post_mel.strip(): + cls.log.debug("Found post mel: `%s`" % post_mel) + + # Strip callbacks and turn into a set for quick lookup + pre_callbacks = {cmd.strip() for cmd in pre_mel.split(";")} + post_callbacks = {cmd.strip() for cmd in post_mel.split(";")} + + pre_script = callback_lookup.get("pre", "") + post_script = callback_lookup.get("post", "") + + # If Yeti is not loaded + invalid = False + if not yeti_loaded: + if pre_script and pre_script in pre_callbacks: + cls.log.error("Found pre render callback '%s' which is not " + "used!" % pre_script) + invalid = True + + if post_script and post_script in post_callbacks: + cls.log.error("Found post render callback '%s' which is " + "not used!" % post_script) + invalid = True + + # If Yeti is loaded + else: + if pre_script and pre_script not in pre_callbacks: + cls.log.error( + "Could not find required pre render callback " + "`%s`" % pre_script) + invalid = True + + if post_script and post_script not in post_callbacks: + cls.log.error( + "Could not find required post render callback" + " `%s`" % post_script) + invalid = True + + return invalid diff --git a/client/ayon_maya/plugins/publish/validate_yeti_rig_cache_state.py b/client/ayon_maya/plugins/publish/validate_yeti_rig_cache_state.py new file mode 100644 index 00000000..87887b7d
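# Reviewer note (not part of the patch): the callback comparison above is a
# set lookup over the ";"-separated MEL snippets stored on the render
# globals. A minimal sketch of that parsing step:
from maya import cmds

def get_render_callbacks(attr="defaultRenderGlobals.preMel"):
    """Return the individual MEL callback commands stored on `attr`."""
    mel = cmds.getAttr(attr) or ""
    return {cmd.strip() for cmd in mel.split(";") if cmd.strip()}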
--- /dev/null +++ b/client/ayon_maya/plugins/publish/validate_yeti_rig_cache_state.py @@ -0,0 +1,72 @@ +import inspect + +import pyblish.api +import maya.cmds as cmds +import ayon_maya.api.action +from ayon_core.pipeline.publish import ( + RepairAction, + PublishValidationError, + OptionalPyblishPluginMixin +) + + +class ValidateYetiRigCacheState(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): + """Validate the I/O attributes of the node + + Every pgYetiMaya cache node per instance should have: + 1. Input Mode is set to `None` + 2. Input Cache File Name is empty + + """ + + order = pyblish.api.ValidatorOrder + label = "Yeti Rig Cache State" + hosts = ["maya"] + families = ["yetiRig"] + actions = [RepairAction, + ayon_maya.api.action.SelectInvalidAction] + optional = False + + def process(self, instance): + if not self.is_active(instance.data): + return + invalid = self.get_invalid(instance) + if invalid: + raise PublishValidationError( + "Nodes have incorrect I/O settings", + description=inspect.getdoc(self) + ) + + @classmethod + def get_invalid(cls, instance): + + invalid = [] + + yeti_nodes = cmds.ls(instance, type="pgYetiMaya") + for node in yeti_nodes: + # Check reading state + state = cmds.getAttr("%s.fileMode" % node) + if state == 1: + cls.log.error("Node `%s` is set to mode `cache`" % node) + invalid.append(node) + continue + + # Check whether a cache file name is set + has_cache = cmds.getAttr("%s.cacheFileName" % node) + if has_cache: + cls.log.error("Node `%s` has a cache file set" % node) + invalid.append(node) + continue + + return invalid + + @classmethod + def repair(cls, instance): + """Repair all errors""" + + # Reset every invalid node so it no longer reads from a cache + invalid = cls.get_invalid(instance) + for node in invalid: + cmds.setAttr("%s.fileMode" % node, 0) + cmds.setAttr("%s.cacheFileName" % node, "", type="string") diff --git a/client/ayon_maya/plugins/publish/validate_yeti_rig_input_in_instance.py b/client/ayon_maya/plugins/publish/validate_yeti_rig_input_in_instance.py new file mode 100644 index 00000000..59a6867c
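# Reviewer note (not part of the patch): the repair in the cache-state
# validator above reduces to two setAttr calls per pgYetiMaya node; a sketch:
from maya import cmds

def reset_yeti_input(node):
    """Set a pgYetiMaya node back to input mode `None` with no cache file."""
    cmds.setAttr(node + ".fileMode", 0)
    cmds.setAttr(node + ".cacheFileName", "", type="string")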
--- /dev/null +++ b/client/ayon_maya/plugins/publish/validate_yeti_rig_input_in_instance.py @@ -0,0 +1,52 @@ +from maya import cmds + +import pyblish.api + +import ayon_maya.api.action +from ayon_core.pipeline.publish import ( + ValidateContentsOrder, + PublishValidationError, + OptionalPyblishPluginMixin +) + + +class ValidateYetiRigInputShapesInInstance(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): + """Validate if all input nodes are part of the instance's hierarchy""" + + order = ValidateContentsOrder + hosts = ["maya"] + families = ["yetiRig"] + label = "Yeti Rig Input Shapes In Instance" + actions = [ayon_maya.api.action.SelectInvalidAction] + optional = False + + def process(self, instance): + if not self.is_active(instance.data): + return + invalid = self.get_invalid(instance) + if invalid: + raise PublishValidationError("Yeti Rig has invalid input meshes") + + @classmethod + def get_invalid(cls, instance): + + input_set = next((i for i in instance if i == "input_SET"), None) + assert input_set, "Current %s instance has no `input_SET`" % instance + + # Get all children, we do not care about intermediates + input_nodes = cmds.ls(cmds.sets(input_set, query=True), long=True) + dag = cmds.ls(input_nodes, dag=True, long=True) + shapes = cmds.ls(dag, long=True, shapes=True, noIntermediate=True) + + # Allow publish without input meshes. + if not shapes: + cls.log.debug("Found no input meshes for %s, skipping ..." + % instance) + return [] + + # Check if the input shapes are part of the instance + instance_lookup = set(instance[:]) + invalid = [s for s in shapes if s not in instance_lookup] + + return invalid diff --git a/client/ayon_maya/plugins/publish/validate_yeti_rig_settings.py b/client/ayon_maya/plugins/publish/validate_yeti_rig_settings.py new file mode 100644 index 00000000..6bd2ebb7 --- /dev/null +++ b/client/ayon_maya/plugins/publish/validate_yeti_rig_settings.py @@ -0,0 +1,61 @@ +import pyblish.api + +from ayon_core.pipeline.publish import ( + PublishValidationError, + OptionalPyblishPluginMixin +) + + +class ValidateYetiRigSettings(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): + """Validate Yeti Rig Settings have collected input connections. + + The input connections are collected for the nodes in the `input_SET`. + When no input connections are found a warning is logged but it is allowed + to pass validation. + + """ + + order = pyblish.api.ValidatorOrder + label = "Yeti Rig Settings" + families = ["yetiRig"] + optional = False + + def process(self, instance): + if not self.is_active(instance.data): + return + invalid = self.get_invalid(instance) + if invalid: + raise PublishValidationError( + ("Detected invalid Yeti Rig data. See the log for details. " + "Tip: save the scene")) + + @classmethod + def get_invalid(cls, instance): + + rigsettings = instance.data.get("rigsettings", None) + if rigsettings is None: + cls.log.error("No rig settings found on the instance!") + return True + + # Get inputs + inputs = rigsettings.get("inputs", []) + if not inputs: + # Empty rig settings dictionary + cls.log.warning("No rig inputs found. This can happen when " + "the rig has no inputs from outside the rig.") + return False + + for input in inputs: + source_id = input["sourceID"] + if source_id is None: + cls.log.error("Discovered source with 'None' as ID, please " + "check if the input shape has a cbId") + return True + + destination_id = input["destinationID"] + if destination_id is None: + cls.log.error("Discovered None as destination ID value") + return True + + return False diff --git a/client/ayon_maya/plugins/workfile_build/load_placeholder.py b/client/ayon_maya/plugins/workfile_build/load_placeholder.py new file mode 100644 index 00000000..6cf38e59 --- /dev/null +++ b/client/ayon_maya/plugins/workfile_build/load_placeholder.py @@ -0,0 +1,132 @@ +from maya import cmds + +from ayon_core.pipeline.workfile.workfile_template_builder import ( + PlaceholderLoadMixin, + LoadPlaceholderItem +) +from ayon_maya.api.lib import ( + get_container_transforms, + get_node_parent, + get_node_index_under_parent +) +from ayon_maya.api.workfile_template_builder import ( + MayaPlaceholderPlugin, +) + + +class MayaPlaceholderLoadPlugin(MayaPlaceholderPlugin, PlaceholderLoadMixin): + identifier = "maya.load" + label = "Maya load" + + item_class = LoadPlaceholderItem + + def _create_placeholder_name(self, placeholder_data): + + # Split builder type: context_assets, linked_assets, all_assets + prefix, suffix = placeholder_data["builder_type"].split("_", 1) + parts = [prefix] + + # add family if any + placeholder_product_type = placeholder_data.get("product_type") + if placeholder_product_type is None: + placeholder_product_type = placeholder_data.get("family") + + if placeholder_product_type: + parts.append(placeholder_product_type) + + # add loader arguments if any + loader_args = placeholder_data["loader_args"] + if loader_args: + loader_args = eval(loader_args) + for value in loader_args.values(): +
parts.append(str(value)) + + parts.append(suffix) + placeholder_name = "_".join(parts) + + return placeholder_name.capitalize() + + def _get_loaded_repre_ids(self): + loaded_representation_ids = self.builder.get_shared_populate_data( + "loaded_representation_ids" + ) + if loaded_representation_ids is None: + try: + containers = cmds.sets("AVALON_CONTAINERS", q=True) + except ValueError: + containers = [] + + loaded_representation_ids = { + cmds.getAttr(container + ".representation") + for container in containers + } + self.builder.set_shared_populate_data( + "loaded_representation_ids", loaded_representation_ids + ) + return loaded_representation_ids + + def populate_placeholder(self, placeholder): + self.populate_load_placeholder(placeholder) + + def repopulate_placeholder(self, placeholder): + repre_ids = self._get_loaded_repre_ids() + self.populate_load_placeholder(placeholder, repre_ids) + + def get_placeholder_options(self, options=None): + return self.get_load_plugin_options(options) + + def load_succeed(self, placeholder, container): + self._parent_in_hierarchy(placeholder, container) + + def _parent_in_hierarchy(self, placeholder, container): + """Parent loaded container to placeholder's parent. + + ie : Set loaded content as placeholder's sibling + + Args: + container (str): Placeholder loaded containers + """ + + if not container: + return + + # TODO: This currently returns only a single root but a loaded scene + # could technically load more than a single root + container_root = get_container_transforms(container, root=True) + + # Bugfix: The get_container_transforms does not recognize the load + # reference group currently + # TODO: Remove this when it does + parent = get_node_parent(container_root) + if parent: + container_root = parent + roots = [container_root] + + # Add the loaded roots to the holding sets if they exist + holding_sets = cmds.listSets(object=placeholder.scene_identifier) or [] + for holding_set in holding_sets: + cmds.sets(roots, forceElement=holding_set) + + # Parent the roots to the place of the placeholder locator and match + # its matrix + placeholder_form = cmds.xform( + placeholder.scene_identifier, + query=True, + matrix=True, + worldSpace=True + ) + scene_parent = get_node_parent(placeholder.scene_identifier) + for node in set(roots): + cmds.xform(node, matrix=placeholder_form, worldSpace=True) + + if scene_parent != get_node_parent(node): + if scene_parent: + node = cmds.parent(node, scene_parent)[0] + else: + node = cmds.parent(node, world=True)[0] + + # Move loaded nodes in index order next to their placeholder node + cmds.reorder(node, back=True) + index = get_node_index_under_parent(placeholder.scene_identifier) + cmds.reorder(node, front=True) + cmds.reorder(node, relative=index + 1) diff --git a/client/ayon_maya/plugins/workfile_build/script_placeholder.py b/client/ayon_maya/plugins/workfile_build/script_placeholder.py new file mode 100644 index 00000000..ff543062 --- /dev/null +++ b/client/ayon_maya/plugins/workfile_build/script_placeholder.py @@ -0,0 +1,201 @@ +from maya import cmds + +from ayon_maya.api.workfile_template_builder import ( + MayaPlaceholderPlugin +) +from ayon_core.lib import NumberDef, TextDef, EnumDef +from ayon_core.lib.events import weakref_partial + + +EXAMPLE_SCRIPT = """ +# Access maya commands +from maya import cmds + +# Access the placeholder node +placeholder_node = placeholder.scene_identifier + +# Access the event callback +if event is None: + print(f"Populating {placeholder}") +else: + if event.topic == 
"template.depth_processed": + print(f"Processed depth: {event.get('depth')}") + elif event.topic == "template.finished": + print("Build finished.") +""".strip() + + +class MayaPlaceholderScriptPlugin(MayaPlaceholderPlugin): + """Execute a script at the given `order` during workfile build. + + This is a very low-level placeholder to run Python scripts at a given + point in time during the workfile template build. + + It can create either a locator or an objectSet as placeholder node. + It defaults to an objectSet, since allowing to run on e.g. other + placeholder node members can be useful, e.g. using: + + >>> members = cmds.sets(placeholder.scene_identifier, query=True) + + """ + + identifier = "maya.runscript" + label = "Run Python Script" + + use_selection_as_parent = False + + def get_placeholder_options(self, options=None): + options = options or {} + return [ + NumberDef( + "order", + label="Order", + default=options.get("order") or 0, + decimals=0, + minimum=0, + maximum=999, + tooltip=( + "Order" + "\nOrder defines asset loading priority (0 to 999)" + "\nPriority rule is : \"lowest is first to load\"." + ) + ), + TextDef( + "prepare_script", + label="Run at\nprepare", + tooltip="Run before populate at prepare order", + multiline=True, + default=options.get("prepare_script", "") + ), + TextDef( + "populate_script", + label="Run at\npopulate", + tooltip="Run script at populate node order
" + "This is the default behavior", + multiline=True, + default=options.get("populate_script", EXAMPLE_SCRIPT) + ), + TextDef( + "depth_processed_script", + label="Run after\ndepth\niteration", + tooltip="Run script after every build depth iteration", + multiline=True, + default=options.get("depth_processed_script", "") + ), + TextDef( + "finished_script", + label="Run after\nbuild", + tooltip=( + "Run script at build finished.
" + "Note: this even runs if other placeholders had " + "errors during the build" + ), + multiline=True, + default=options.get("finished_script", "") + ), + EnumDef( + "create_nodetype", + label="Nodetype", + items={ + "spaceLocator": "Locator", + "objectSet": "ObjectSet" + }, + tooltip=( + "The placeholder's node type to be created.
" + "Note this only works on create, not on update" + ), + default=options.get("create_nodetype", "objectSet") + ), + ] + + def create_placeholder(self, placeholder_data): + nodetype = placeholder_data.get("create_nodetype", "objectSet") + + if nodetype == "spaceLocator": + super(MayaPlaceholderScriptPlugin, self).create_placeholder( + placeholder_data + ) + elif nodetype == "objectSet": + placeholder_data["plugin_identifier"] = self.identifier + + # Create maya objectSet on selection + selection = cmds.ls(selection=True, long=True) + name = self._create_placeholder_name(placeholder_data) + node = cmds.sets(selection, name=name) + + self.imprint(node, placeholder_data) + + def prepare_placeholders(self, placeholders): + super(MayaPlaceholderScriptPlugin, self).prepare_placeholders( + placeholders + ) + for placeholder in placeholders: + prepare_script = placeholder.data.get("prepare_script") + if not prepare_script: + continue + + self.run_script(placeholder, prepare_script) + + def populate_placeholder(self, placeholder): + + populate_script = placeholder.data.get("populate_script") + depth_script = placeholder.data.get("depth_processed_script") + finished_script = placeholder.data.get("finished_script") + + # Run now + if populate_script: + self.run_script(placeholder, populate_script) + + if not any([depth_script, finished_script]): + # No callback scripts to run + if not placeholder.data.get("keep_placeholder", True): + self.delete_placeholder(placeholder) + return + + # Run at each depth processed + if depth_script: + callback = weakref_partial( + self.run_script, placeholder, depth_script) + self.builder.add_on_depth_processed_callback( + callback, order=placeholder.order) + + # Run at build finish + if finished_script: + callback = weakref_partial( + self.run_script, placeholder, finished_script) + self.builder.add_on_finished_callback( + callback, order=placeholder.order) + + # If placeholder should be deleted, delete it after finish so + # the scripts have access to it up to the last run + if not placeholder.data.get("keep_placeholder", True): + delete_callback = weakref_partial( + self.delete_placeholder, placeholder) + self.builder.add_on_finished_callback( + delete_callback, order=placeholder.order + 1) + + def run_script(self, placeholder, script, event=None): + """Run script + + Even though `placeholder` is an unused arguments by exposing it as + an input argument it means it makes it available through + globals()/locals() in the `exec` call, giving the script access + to the placeholder. + + For example: + >>> node = placeholder.scene_identifier + + In the case the script is running at a callback level (not during + populate) then it has access to the `event` as well, otherwise the + value is None if it runs during `populate_placeholder` directly. 
+ + For example adding this as the callback script: + >>> if event is not None: + >>> if event.topic == "on_depth_processed": + >>> print(f"Processed depth: {event.get('depth')}") + >>> elif event.topic == "on_finished": + >>> print("Build finished.") + + """ + self.log.debug(f"Running script at event: {event}") + exec(script, locals()) diff --git a/client/ayon_maya/startup/userSetup.py b/client/ayon_maya/startup/userSetup.py new file mode 100644 index 00000000..600864fd --- /dev/null +++ b/client/ayon_maya/startup/userSetup.py @@ -0,0 +1,50 @@ +import os + +from ayon_core.settings import get_project_settings +from ayon_core.pipeline import install_host, get_current_project_name +from ayon_maya.api import MayaHost + +from maya import cmds + + +host = MayaHost() +install_host(host) + +print("Starting AYON usersetup...") + +project_name = get_current_project_name() +settings = get_project_settings(project_name) + +# Loading plugins explicitly. +explicit_plugins_loading = settings["maya"]["explicit_plugins_loading"] +if explicit_plugins_loading["enabled"]: + def _explicit_load_plugins(): + for plugin in explicit_plugins_loading["plugins_to_load"]: + if plugin["enabled"]: + print("Loading plug-in: " + plugin["name"]) + try: + cmds.loadPlugin(plugin["name"], quiet=True) + except RuntimeError as e: + print(e) + + # We need to load plugins deferred as loading them directly does not work + # correctly due to Maya's initialization. + cmds.evalDeferred( + _explicit_load_plugins, + lowestPriority=True + ) + +# Open Workfile Post Initialization. +key = "AYON_OPEN_WORKFILE_POST_INITIALIZATION" +if bool(int(os.environ.get(key, "0"))): + def _log_and_open(): + path = os.environ["AYON_LAST_WORKFILE"] + print("Opening \"{}\"".format(path)) + cmds.file(path, open=True, force=True) + cmds.evalDeferred( + _log_and_open, + lowestPriority=True + ) + + +print("Finished AYON usersetup.") diff --git a/client/ayon_maya/tools/__init__.py b/client/ayon_maya/tools/__init__.py new file mode 100644 index 00000000..0dd6de23 --- /dev/null +++ b/client/ayon_maya/tools/__init__.py @@ -0,0 +1,27 @@ +from ayon_core.tools.utils.host_tools import qt_app_context + + +class MayaToolsSingleton: + _look_assigner = None + + +def get_look_assigner_tool(parent): + """Create, cache and return look assigner tool window.""" + if MayaToolsSingleton._look_assigner is None: + from .mayalookassigner import MayaLookAssignerWindow + mayalookassigner_window = MayaLookAssignerWindow(parent) + MayaToolsSingleton._look_assigner = mayalookassigner_window + return MayaToolsSingleton._look_assigner + + +def show_look_assigner(parent=None): + """Look manager is Maya specific tool for look management.""" + + with qt_app_context(): + look_assigner_tool = get_look_assigner_tool(parent) + look_assigner_tool.show() + + # Pull window to the front. 
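# Reviewer note (not part of the patch): userSetup.py above defers plug-in
# loading and workfile opening with cmds.evalDeferred because that work
# cannot run safely while Maya is still initializing. The same pattern,
# as a sketch:
from maya import cmds

def defer(fn):
    """Run `fn` once Maya finished initializing, after other deferred calls."""
    cmds.evalDeferred(fn, lowestPriority=True)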
+ look_assigner_tool.raise_() + look_assigner_tool.activateWindow() + look_assigner_tool.showNormal() diff --git a/client/ayon_maya/tools/mayalookassigner/LICENSE b/client/ayon_maya/tools/mayalookassigner/LICENSE new file mode 100644 index 00000000..852751db --- /dev/null +++ b/client/ayon_maya/tools/mayalookassigner/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2017 Colorbleed + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/client/ayon_maya/tools/mayalookassigner/__init__.py b/client/ayon_maya/tools/mayalookassigner/__init__.py new file mode 100644 index 00000000..5e407777 --- /dev/null +++ b/client/ayon_maya/tools/mayalookassigner/__init__.py @@ -0,0 +1,9 @@ +from .app import ( + MayaLookAssignerWindow, + show +) + + +__all__ = [ + "MayaLookAssignerWindow", + "show"] diff --git a/client/ayon_maya/tools/mayalookassigner/alembic.py b/client/ayon_maya/tools/mayalookassigner/alembic.py new file mode 100644 index 00000000..6885e923 --- /dev/null +++ b/client/ayon_maya/tools/mayalookassigner/alembic.py @@ -0,0 +1,97 @@ +# -*- coding: utf-8 -*- +"""Tools for reading id attributes from Alembic files.""" +import os +from collections import defaultdict +import logging + +import six + +import alembic.Abc + + +log = logging.getLogger(__name__) + + +def get_alembic_paths_by_property(filename, attr, verbose=False): + # type: (str, str, bool) -> dict + """Return attribute value per object in the Alembic file. + + Reads an Alembic archive hierarchy and retrieves the + value from the `attr` properties on the objects. + + Args: + filename (str): Full path to Alembic archive to read. + attr (str): Id attribute. + verbose (bool): Whether to verbosely log missing attributes. + + Returns: + dict: Mapping of node full path to its id + + """ + # Normalize alembic path + filename = os.path.normpath(filename) + filename = filename.replace("\\", "/") + filename = str(filename) # path must be string + + try: + archive = alembic.Abc.IArchive(filename) + except RuntimeError: + # invalid alembic file - probably vrmesh + log.warning("{} is not an alembic file".format(filename)) + return {} + root = archive.getTop() + + iterator = list(root.children) + obj_ids = {} + + for obj in iterator: + name = obj.getFullName() + + # include children for coming iterations + iterator.extend(obj.children) + + props = obj.getProperties() + if props.getNumProperties() == 0: + # Skip those without properties, e.g. '/materials' in a gpuCache + continue + + # The custom attribute is under the properties' first container under + # the ".arbGeomParams" + prop = props.getProperty(0) # get base property + + _property = None + try: + geo_params = prop.getProperty('.arbGeomParams') + _property = geo_params.getProperty(attr) + except KeyError: + if verbose: + log.debug("Missing attr on: {0}".format(name)) + continue + + if not _property.isConstant(): + log.warning("Id not constant on: {0}".format(name)) + + # Get first value sample + value = _property.getValue()[0] + + obj_ids[name] = value + + return obj_ids + + +def get_alembic_ids_cache(path): + # type: (str) -> dict + """Build an id to node mapping from an Alembic file. + + Nodes without IDs are ignored. + + Returns: + dict: Mapping of id to nodes in the Alembic. + + """ + node_ids = get_alembic_paths_by_property(path, attr="cbId") + id_nodes = defaultdict(list) + for node, _id in six.iteritems(node_ids): + id_nodes[_id].append(node) + + return dict(six.iteritems(id_nodes))
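# Reviewer note (not part of the patch): example use of the cache above,
# grouping Alembic object paths by their "cbId" value. The file path is
# hypothetical.
id_to_paths = get_alembic_ids_cache("/tmp/pointcache.abc")
for node_id, paths in sorted(id_to_paths.items()):
    print(node_id, "->", paths)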
diff --git a/client/ayon_maya/tools/mayalookassigner/app.py b/client/ayon_maya/tools/mayalookassigner/app.py new file mode 100644 index 00000000..2937b729 --- /dev/null +++ b/client/ayon_maya/tools/mayalookassigner/app.py @@ -0,0 +1,317 @@ +import sys +import time +import logging + +import ayon_api +from qtpy import QtWidgets, QtCore + +from ayon_core import style +from ayon_core.pipeline import get_current_project_name +from ayon_core.tools.utils.lib import qt_app_context +from ayon_maya.api.lib import ( + assign_look_by_version, + get_main_window +) + +from maya import cmds +# old api for MFileIO +import maya.OpenMaya +import maya.api.OpenMaya as om + +from .widgets import ( + AssetOutliner, + LookOutliner +) +from .commands import ( + get_workfile, + remove_unused_looks +) +from .vray_proxies import vrayproxy_assign_look +from . 
import arnold_standin + +module = sys.modules[__name__] +module.window = None + + +class MayaLookAssignerWindow(QtWidgets.QWidget): + + def __init__(self, parent=None): + super(MayaLookAssignerWindow, self).__init__(parent=parent) + + self.log = logging.getLogger(__name__) + + # Store callback references + self._callbacks = [] + self._connections_set_up = False + + filename = get_workfile() + + self.setObjectName("lookManager") + self.setWindowTitle("Look Manager 1.4.0 - [{}]".format(filename)) + self.setWindowFlags(QtCore.Qt.Window) + self.setParent(parent) + + self.resize(750, 500) + + self.setup_ui() + + # Force refresh check on initialization + self._on_renderlayer_switch() + + def setup_ui(self): + """Build the UI""" + + main_splitter = QtWidgets.QSplitter(self) + + # Assets (left) + asset_outliner = AssetOutliner(main_splitter) + + # Looks (right) + looks_widget = QtWidgets.QWidget(main_splitter) + + look_outliner = LookOutliner(looks_widget) # Database look overview + + assign_selected = QtWidgets.QCheckBox( + "Assign to selected only", looks_widget + ) + assign_selected.setToolTip("Whether to assign only to selected nodes " + "or to the full asset") + remove_unused_btn = QtWidgets.QPushButton( + "Remove Unused Looks", looks_widget + ) + + looks_layout = QtWidgets.QVBoxLayout(looks_widget) + looks_layout.addWidget(look_outliner) + looks_layout.addWidget(assign_selected) + looks_layout.addWidget(remove_unused_btn) + + main_splitter.addWidget(asset_outliner) + main_splitter.addWidget(looks_widget) + main_splitter.setSizes([350, 200]) + + # Footer + status = QtWidgets.QStatusBar(self) + status.setSizeGripEnabled(False) + status.setFixedHeight(25) + warn_layer = QtWidgets.QLabel( + "Current Layer is not defaultRenderLayer", self + ) + warn_layer.setAlignment(QtCore.Qt.AlignRight | QtCore.Qt.AlignVCenter) + warn_layer.setStyleSheet("color: #DD5555; font-weight: bold;") + warn_layer.setFixedHeight(25) + + footer = QtWidgets.QHBoxLayout() + footer.setContentsMargins(0, 0, 0, 0) + footer.addWidget(status) + footer.addWidget(warn_layer) + + # Build up widgets + main_layout = QtWidgets.QVBoxLayout(self) + main_layout.setSpacing(0) + main_layout.addWidget(main_splitter) + main_layout.addLayout(footer) + + # Set column width + asset_outliner.view.setColumnWidth(0, 200) + look_outliner.view.setColumnWidth(0, 150) + + asset_outliner.selection_changed.connect( + self.on_asset_selection_changed) + + asset_outliner.refreshed.connect( + lambda: self.echo("Loaded assets..") + ) + + look_outliner.menu_apply_action.connect(self.on_process_selected) + remove_unused_btn.clicked.connect(remove_unused_looks) + + # Open widgets + self.asset_outliner = asset_outliner + self.look_outliner = look_outliner + self.status = status + self.warn_layer = warn_layer + + # Buttons + self.remove_unused = remove_unused_btn + self.assign_selected = assign_selected + + self._first_show = True + + def setup_connections(self): + """Connect interactive widgets with actions""" + if self._connections_set_up: + return + + # Maya renderlayer switch callback + callback = om.MEventMessage.addEventCallback( + "renderLayerManagerChange", + self._on_renderlayer_switch + ) + self._callbacks.append(callback) + self._connections_set_up = True + + def remove_connection(self): + # Delete callbacks + for callback in self._callbacks: + om.MMessage.removeCallback(callback) + + self._callbacks = [] + self._connections_set_up = False + + def showEvent(self, event): + self.setup_connections() + super(MayaLookAssignerWindow, self).showEvent(event) 
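# Reviewer note (not part of the patch): minimal sketch of the Maya event
# callback pattern used by setup_connections()/remove_connection() above.
# Keeping the returned id around is required; without removeCallback the
# callback would leak across window sessions.
import maya.api.OpenMaya as om

def watch_renderlayers(on_change):
    """Attach `on_change` to renderlayer switches; returns a detach function."""
    callback_id = om.MEventMessage.addEventCallback(
        "renderLayerManagerChange", on_change)
    return lambda: om.MMessage.removeCallback(callback_id)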
+ if self._first_show: + self._first_show = False + self.setStyleSheet(style.load_stylesheet()) + + def closeEvent(self, event): + self.remove_connection() + super(MayaLookAssignerWindow, self).closeEvent(event) + + def _on_renderlayer_switch(self, *args): + """Callback that updates on Maya renderlayer switch""" + + if maya.OpenMaya.MFileIO.isNewingFile(): + # Don't perform a check during file open or file new as + # the renderlayers will not be in a valid state yet. + return + + layer = cmds.editRenderLayerGlobals(query=True, + currentRenderLayer=True) + if layer != "defaultRenderLayer": + self.warn_layer.show() + else: + self.warn_layer.hide() + + def echo(self, message): + self.status.showMessage(message, 1500) + + def refresh(self): + """Refresh the content""" + + # Get all containers and information + self.asset_outliner.clear() + found_items = self.asset_outliner.get_all_assets() + if not found_items: + self.look_outliner.clear() + + def on_asset_selection_changed(self): + """Get selected items from asset loader and fill look outliner""" + + items = self.asset_outliner.get_selected_items() + self.look_outliner.clear() + self.look_outliner.add_items(items) + + def on_process_selected(self): + """Process all selected looks for the selected assets""" + + assets = self.asset_outliner.get_selected_items() + assert assets, "No asset selected" + + # Collect the looks we want to apply (by name) + look_items = self.look_outliner.get_selected_items() + looks = {look["product"] for look in look_items} + + selection = self.assign_selected.isChecked() + asset_nodes = self.asset_outliner.get_nodes(selection=selection) + + project_name = get_current_project_name() + start = time.time() + for i, (asset, item) in enumerate(asset_nodes.items()): + + # Label prefix + prefix = "({}/{})".format(i + 1, len(asset_nodes)) + + # Assign the first matching look relevant for this asset + # (since assigning multiple to the same nodes makes no sense) + assign_look = next( + ( + product_entity + for product_entity in item["looks"] + if product_entity["name"] in looks + ), + None + ) + if not assign_look: + self.echo( + "{} No matching selected look for {}".format(prefix, asset) + ) + continue + + # Get the latest version of this asset's look product + version_entity = ayon_api.get_last_version_by_product_id( + project_name, assign_look["id"], fields={"id"} + ) + + product_name = assign_look["name"] + self.echo("{} Assigning {} to {}\t".format( + prefix, product_name, asset + )) + nodes = item["nodes"] + + # Assign Vray Proxy look. + if cmds.pluginInfo('vrayformaya', query=True, loaded=True): + self.echo("Getting vray proxy nodes ...") + vray_proxies = set(cmds.ls(type="VRayProxy", long=True)) + + for vp in vray_proxies: + if vp in nodes: + vrayproxy_assign_look(vp, product_name) + + nodes = list(set(nodes).difference(vray_proxies)) + else: + self.echo( + "Could not assign to VRayProxy because vrayformaya plugin " + "is not loaded." + ) + + # Assign Arnold Standin look. + if cmds.pluginInfo("mtoa", query=True, loaded=True): + arnold_standins = set(cmds.ls(type="aiStandIn", long=True)) + + for standin in arnold_standins: + if standin in nodes: + arnold_standin.assign_look(standin, product_name) + + nodes = list(set(nodes).difference(arnold_standins)) + else: + self.echo( + "Could not assign to aiStandIn because mtoa plugin is not " + "loaded." + ) + + # Assign look + if nodes: + assign_look_by_version( + nodes, version_id=version_entity["id"] + ) + + end = time.time() + + self.echo("Finished assigning.. 
({0:.3f}s)".format(end - start)) + + +def show(): + """Display the Look Assigner GUI.""" + + try: + module.window.close() + del module.window + except (RuntimeError, AttributeError): + pass + + # Get Maya main window + mainwindow = get_main_window() + + with qt_app_context(): + window = MayaLookAssignerWindow(parent=mainwindow) + window.show() + + module.window = window diff --git a/client/ayon_maya/tools/mayalookassigner/arnold_standin.py b/client/ayon_maya/tools/mayalookassigner/arnold_standin.py new file mode 100644 index 00000000..c285b857 --- /dev/null +++ b/client/ayon_maya/tools/mayalookassigner/arnold_standin.py @@ -0,0 +1,263 @@ +import os +import json +from collections import defaultdict +import logging + +from maya import cmds +import ayon_api + +from ayon_core.pipeline import get_current_project_name +from ayon_maya import api + +from . import lib +from .alembic import get_alembic_ids_cache +from .usd import is_usd_lib_supported, get_usd_ids_cache + + +log = logging.getLogger(__name__) + + +ATTRIBUTE_MAPPING = { + "primaryVisibility": "visibility", # Camera + "castsShadows": "visibility", # Shadow + "receiveShadows": "receive_shadows", + "aiSelfShadows": "self_shadows", + "aiOpaque": "opaque", + "aiMatte": "matte", + "aiVisibleInDiffuseTransmission": "visibility", + "aiVisibleInSpecularTransmission": "visibility", + "aiVisibleInVolume": "visibility", + "aiVisibleInDiffuseReflection": "visibility", + "aiVisibleInSpecularReflection": "visibility", + "aiSubdivUvSmoothing": "subdiv_uv_smoothing", + "aiDispHeight": "disp_height", + "aiDispPadding": "disp_padding", + "aiDispZeroValue": "disp_zero_value", + "aiStepSize": "step_size", + "aiVolumePadding": "volume_padding", + "aiSubdivType": "subdiv_type", + "aiSubdivIterations": "subdiv_iterations" +} + + +def calculate_visibility_mask(attributes): + # https://arnoldsupport.com/2018/11/21/backdoor-setting-visibility/ + mapping = { + "primaryVisibility": 1, # Camera + "castsShadows": 2, # Shadow + "aiVisibleInDiffuseTransmission": 4, + "aiVisibleInSpecularTransmission": 8, + "aiVisibleInVolume": 16, + "aiVisibleInDiffuseReflection": 32, + "aiVisibleInSpecularReflection": 64 + } + mask = 255 + for attr, value in mapping.items(): + if attributes.get(attr, True): + continue + + mask -= value + + return mask + + +def get_nodes_by_id(standin): + """Get node ids from an aiStandIn via its json sidecar. + + Args: + standin (string): aiStandIn node. + + Returns: + (dict): Dictionary with node full name/path and id. + """ + path = cmds.getAttr(standin + ".dso") + + if path.endswith(".abc"): + # Support alembic files directly + return get_alembic_ids_cache(path) + + elif ( + is_usd_lib_supported and + any(path.endswith(ext) for ext in [".usd", ".usda", ".usdc"]) + ): + # Support usd files directly + return get_usd_ids_cache(path) + + json_path = None + for f in os.listdir(os.path.dirname(path)): + if f.endswith(".json"): + json_path = os.path.join(os.path.dirname(path), f) + break + + if not json_path: + log.warning("Could not find json file for {}.".format(standin)) + return {} + + with open(json_path, "r") as f: + return json.load(f)
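# Reviewer note (not part of the patch): worked example of the bitmask above.
# Disabling only camera visibility clears bit 1, so the mask becomes 254;
# attributes that are absent default to visible and keep their bits.
attrs = {"primaryVisibility": False, "castsShadows": True}
assert calculate_visibility_mask(attrs) == 254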
+ + +def shading_engine_assignments(shading_engine, attribute, nodes, assignments): + """Full assignments with shader or disp_map. + + Args: + shading_engine (string): Shading engine for material. + attribute (string): "surfaceShader" or "displacementShader" + nodes (list): Node paths relative to the aiStandIn. + assignments (dict): Assignments by node. + """ + shader_inputs = cmds.listConnections( + shading_engine + "." + attribute, source=True + ) + if not shader_inputs: + log.info( + "Shading engine \"{}\" missing input \"{}\"".format( + shading_engine, attribute + ) + ) + return + + # Strip off component assignments + for i, node in enumerate(nodes): + if "." in node: + log.warning( + "Converting face assignment to full object assignment. This " + "conversion can be lossy: {}".format(node) + ) + nodes[i] = node.split(".")[0] + + shader_type = "shader" if attribute == "surfaceShader" else "disp_map" + assignment = "{}='{}'".format(shader_type, shader_inputs[0]) + for node in nodes: + assignments[node].append(assignment) + + +def assign_look(standin, product_name): + log.info("Assigning {} to {}.".format(product_name, standin)) + + nodes_by_id = get_nodes_by_id(standin) + + # Group by folder id so we run over the look per folder + node_ids_by_folder_id = defaultdict(set) + for node_id in nodes_by_id: + folder_id = node_id.split(":", 1)[0] + node_ids_by_folder_id[folder_id].add(node_id) + + project_name = get_current_project_name() + for folder_id, node_ids in node_ids_by_folder_id.items(): + + # Get latest look version + version_entity = ayon_api.get_last_version_by_product_name( + project_name, + product_name, + folder_id, + fields={"id"} + ) + if not version_entity: + log.info("Didn't find last version for product name {}".format( + product_name + )) + continue + version_id = version_entity["id"] + + relationships = lib.get_look_relationships(version_id) + shader_nodes, container_node = lib.load_look(version_id) + namespace = shader_nodes[0].split(":")[0] + + # Get only the node ids and paths related to this folder + # And get the shader edits the look supplies + asset_nodes_by_id = { + node_id: nodes_by_id[node_id] for node_id in node_ids + } + edits = list( + api.lib.iter_shader_edits( + relationships, shader_nodes, asset_nodes_by_id + ) + ) + + # Create assignments + node_assignments = {} + for edit in edits: + for node in edit["nodes"]: + if node not in node_assignments: + node_assignments[node] = [] + + if edit["action"] == "assign": + if not cmds.ls(edit["shader"], type="shadingEngine"): + log.info("Skipping non-shader: %s" % edit["shader"]) + continue + + shading_engine_assignments( + shading_engine=edit["shader"], + attribute="surfaceShader", + nodes=edit["nodes"], + assignments=node_assignments + ) + shading_engine_assignments( + shading_engine=edit["shader"], + attribute="displacementShader", + nodes=edit["nodes"], + assignments=node_assignments + ) + + if edit["action"] == "setattr": + visibility = False + for attr, value in edit["attributes"].items(): + if attr not in ATTRIBUTE_MAPPING: + log.warning( + "Skipping setting attribute {} on {} because it is" + " not recognized.".format(attr, edit["nodes"]) + ) + continue + + if isinstance(value, str): + value = "'{}'".format(value) + + if ATTRIBUTE_MAPPING[attr] == "visibility": + visibility = True + continue + + assignment = "{}={}".format(ATTRIBUTE_MAPPING[attr], value) + + for node in edit["nodes"]: + node_assignments[node].append(assignment) + + if visibility: + mask = calculate_visibility_mask(edit["attributes"]) + assignment = "visibility={}".format(mask) + + for node in edit["nodes"]: + node_assignments[node].append(assignment) + + # Assign shader + # Clear all current shader assignments + plug = standin + ".operators" + num = cmds.getAttr(plug, size=True) + for i in reversed(range(num)):
cmds.removeMultiInstance("{}[{}]".format(plug, i), b=True) + + # Create new assignment overrides + index = 0 + for node, assignments in node_assignments.items(): + if not assignments: + continue + + with api.lib.maintained_selection(): + operator = cmds.createNode("aiSetParameter") + operator = cmds.rename(operator, namespace + ":" + operator) + + cmds.setAttr(operator + ".selection", node, type="string") + for i, assignment in enumerate(assignments): + cmds.setAttr( + "{}.assignment[{}]".format(operator, i), + assignment, + type="string" + ) + + cmds.connectAttr( + operator + ".out", "{}[{}]".format(plug, index) + ) + + index += 1 + + cmds.sets(operator, edit=True, addElement=container_node) diff --git a/client/ayon_maya/tools/mayalookassigner/commands.py b/client/ayon_maya/tools/mayalookassigner/commands.py new file mode 100644 index 00000000..54b1cff7 --- /dev/null +++ b/client/ayon_maya/tools/mayalookassigner/commands.py @@ -0,0 +1,199 @@ +import os +import logging +from collections import defaultdict + +import ayon_api +import maya.cmds as cmds + +from ayon_core.pipeline import ( + remove_container, + registered_host, + get_current_project_name, +) +from ayon_maya.api import lib + +from .vray_proxies import get_alembic_ids_cache +from . import arnold_standin + +log = logging.getLogger(__name__) + + +def get_workfile(): + path = cmds.file(query=True, sceneName=True) or "untitled" + return os.path.basename(path) + + +def get_workfolder(): + return os.path.dirname(cmds.file(query=True, sceneName=True)) + + +def select(nodes): + cmds.select(nodes) + + +def get_namespace_from_node(node): + """Get the namespace from the given node + + Args: + node (str): name of the node + + Returns: + namespace (str) + + """ + parts = node.rsplit("|", 1)[-1].rsplit(":", 1) + return parts[0] if len(parts) > 1 else u":" + + +def get_selected_nodes(): + """Get information from current selection""" + + selection = cmds.ls(selection=True, long=True) + hierarchy = lib.get_all_children(selection, + ignore_intermediate_objects=True) + return list(hierarchy.union(selection)) + + +def get_all_asset_nodes(): + """Get all nodes from the current scene to gather assets from. + + Returns: + list: list of node names + """ + return cmds.ls(dag=True, noIntermediate=True, long=True) + + +def create_folder_id_hash(nodes): + """Create a hash based on cbId attribute value + + Args: + nodes (list): a list of nodes + + Returns: + dict + """ + node_id_hash = defaultdict(list) + for node in nodes: + # iterate over content of reference node + if cmds.nodeType(node) == "reference": + ref_hashes = create_folder_id_hash( + list(set(cmds.referenceQuery(node, nodes=True, dp=True)))) + for folder_id, ref_nodes in ref_hashes.items(): + node_id_hash[folder_id] += ref_nodes + elif cmds.pluginInfo('vrayformaya', query=True, + loaded=True) and cmds.nodeType( + node) == "VRayProxy": + path = cmds.getAttr("{}.fileName".format(node)) + ids = get_alembic_ids_cache(path) + for k, _ in ids.items(): + id = k.split(":")[0] + node_id_hash[id].append(node) + elif cmds.nodeType(node) == "aiStandIn": + for id, _ in arnold_standin.get_nodes_by_id(node).items(): + id = id.split(":")[0] + node_id_hash[id].append(node) + else: + value = lib.get_id(node) + if value is None: + continue + + folder_id = value.split(":")[0] + node_id_hash[folder_id].append(node) + + return dict(node_id_hash)
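# Reviewer note (not part of the patch): create_folder_id_hash() above keys
# nodes by the folder-id half of their "cbId" value ("<folder_id>:<node_id>").
# A pure-Python sketch of that grouping step, with hypothetical input:
from collections import defaultdict

def group_by_folder_id(cb_id_by_node):
    groups = defaultdict(list)
    for node, cb_id in cb_id_by_node.items():
        groups[cb_id.split(":", 1)[0]].append(node)
    return dict(groups)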
+ + +def create_items_from_nodes(nodes): + """Create an item for the view based on the container and its content. + + It fetches the look document based on the folder id found in the content. + The item will contain all important information for the tool to work. + + If there is a folder id that is not registered in the project it will + log a warning message. + + Args: + nodes (list): list of maya nodes + + Returns: + list of dicts + + """ + + folder_view_items = [] + + id_hashes = create_folder_id_hash(nodes) + + if not id_hashes: + log.warning("No id hashes") + return folder_view_items + + project_name = get_current_project_name() + folder_ids = set(id_hashes.keys()) + + folder_entities = ayon_api.get_folders( + project_name, folder_ids, fields={"id", "path"} + ) + folder_entities_by_id = { + folder_entity["id"]: folder_entity + for folder_entity in folder_entities + } + + for folder_id, id_nodes in id_hashes.items(): + folder_entity = folder_entities_by_id.get(folder_id) + # Skip if folder id is not found + if not folder_entity: + log.warning( + "Id found on {num} nodes for which no folder is found in the" + " database, skipping '{folder_id}'".format( + num=len(id_nodes), + folder_id=folder_id + ) + ) + continue + + # Collect available look products for this folder + looks = lib.list_looks(project_name, folder_entity["id"]) + + # Collect namespaces the folder is found in + namespaces = set() + for node in id_nodes: + namespace = get_namespace_from_node(node) + namespaces.add(namespace) + + folder_view_items.append({ + "label": folder_entity["path"], + "folder_entity": folder_entity, + "looks": looks, + "namespaces": namespaces + }) + + return folder_view_items + + +def remove_unused_looks(): + """Removes all loaded looks for which none of the shaders are used. + + This will cleanup all loaded "LookLoader" containers that are unused in + the current scene. + + """ + + host = registered_host() + + unused = [] + for container in host.ls(): + if container['loader'] == "LookLoader": + members = lib.get_container_members(container['objectName']) + look_sets = cmds.ls(members, type="objectSet") + for look_set in look_sets: + # If the set is used then we consider this look *in use* + if cmds.sets(look_set, query=True): + break + else: + unused.append(container) + + for container in unused: + log.info("Removing unused look container: %s", container['objectName']) + remove_container(container) + + log.info("Finished removing unused looks. (see log for details)") diff --git a/client/ayon_maya/tools/mayalookassigner/lib.py b/client/ayon_maya/tools/mayalookassigner/lib.py new file mode 100644 index 00000000..5417db26 --- /dev/null +++ b/client/ayon_maya/tools/mayalookassigner/lib.py @@ -0,0 +1,88 @@ +import json +import logging + +from ayon_api import get_representation_by_name + +from ayon_core.pipeline import ( + get_current_project_name, + get_representation_path, + registered_host, + discover_loader_plugins, + loaders_from_representation, + load_container +) +from ayon_maya.api import lib + + +log = logging.getLogger(__name__) + + +def get_look_relationships(version_id): + # type: (str) -> dict + """Get relations for the look. + + Args: + version_id (str): Parent version Id. + + Returns: + dict: Dictionary of relations. + """ + + project_name = get_current_project_name() + json_representation = get_representation_by_name( + project_name, "json", version_id + ) + + # Load relationships + shader_relation = get_representation_path(json_representation) + with open(shader_relation, "r") as f: + relationships = json.load(f) + + return relationships + + +def load_look(version_id): + # type: (str) -> tuple + """Load look from version.
+ + Get look from version and invoke Loader for it. + + Args: + version_id (str): Version ID + + Returns: + tuple: (list of member nodes, container node name) + + """ + + project_name = get_current_project_name() + # Get representations of shader file and relationships + look_representation = get_representation_by_name( + project_name, "ma", version_id + ) + + # See if representation is already loaded, if so reuse it. + host = registered_host() + representation_id = look_representation["id"] + for container in host.ls(): + if (container['loader'] == "LookLoader" and + container['representation'] == representation_id): + log.info("Reusing loaded look ...") + container_node = container['objectName'] + break + else: + log.info("Using look for the first time ...") + + # Load file + all_loaders = discover_loader_plugins() + loaders = loaders_from_representation(all_loaders, representation_id) + loader = next( + (i for i in loaders if i.__name__ == "LookLoader"), None) + if loader is None: + raise RuntimeError("Could not find LookLoader, this is a bug") + + # Reference the look file + with lib.maintained_selection(): + container_node = load_container(loader, look_representation)[0] + + return lib.get_container_members(container_node), container_node diff --git a/client/ayon_maya/tools/mayalookassigner/models.py b/client/ayon_maya/tools/mayalookassigner/models.py new file mode 100644 index 00000000..b0807be6 --- /dev/null +++ b/client/ayon_maya/tools/mayalookassigner/models.py @@ -0,0 +1,134 @@ +from collections import defaultdict + +from qtpy import QtCore +import qtawesome + +from ayon_core.tools.utils import models +from ayon_core.style import get_default_entity_icon_color + + +class AssetModel(models.TreeModel): + + Columns = ["label"] + + def __init__(self, *args, **kwargs): + super(AssetModel, self).__init__(*args, **kwargs) + + self._icon_color = get_default_entity_icon_color() + + def add_items(self, items): + """ + Add items to model with needed data + Args: + items(list): collection of item data + + Returns: + None + """ + + self.beginResetModel() + + # Add the items sorted by label + def sorter(x): + return x["label"] + + for item in sorted(items, key=sorter): + + asset_item = models.Item() + asset_item.update(item) + asset_item["icon"] = "folder" + + # Add namespace children + namespaces = item["namespaces"] + for namespace in sorted(namespaces): + child = models.Item() + child.update(item) + child.update({ + "label": (namespace if namespace != ":" + else "(no namespace)"), + "namespace": namespace, + "looks": item["looks"], + "icon": "folder-o" + }) + asset_item.add_child(child) + + self.add_child(asset_item) + + self.endResetModel() + + def data(self, index, role): + + if not index.isValid(): + return + + if role == models.TreeModel.ItemRole: + node = index.internalPointer() + return node + + # Add icon + if role == QtCore.Qt.DecorationRole: + if index.column() == 0: + node = index.internalPointer() + icon = node.get("icon") + if icon: + return qtawesome.icon( + "fa.{0}".format(icon), + color=self._icon_color + ) + + return super(AssetModel, self).data(index, role) + + +class LookModel(models.TreeModel): + """Model displaying a list of looks and matches for assets""" + + Columns = ["label", "match"] + + def add_items(self, items): + """Add items to model with needed data + + An item consists of: + { + "product": 'name of product', + "folder_entity": folder entity + } + + Args: + items(list): collection of item data + + Returns: + None + """ + + self.beginResetModel() + + # Collect the assets per look name (from the items of the AssetModel)
+ look_products = defaultdict(list) + for asset_item in items: + folder_entity = asset_item["folder_entity"] + for look in asset_item["looks"]: + look_products[look["name"]].append(folder_entity) + + for product_name in sorted(look_products.keys()): + folder_entities = look_products[product_name] + + # Define nice label without "look" prefix for readability + label = ( + product_name + if not product_name.startswith("look") + else product_name[4:] + ) + + item_node = models.Item() + item_node["label"] = label + item_node["product"] = product_name + + # Amount of matching assets for this look + item_node["match"] = len(folder_entities) + + # Store the assets that have this product available + item_node["folder_entities"] = folder_entities + + self.add_child(item_node) + + self.endResetModel() diff --git a/client/ayon_maya/tools/mayalookassigner/usd.py b/client/ayon_maya/tools/mayalookassigner/usd.py new file mode 100644 index 00000000..6b5cb2f0 --- /dev/null +++ b/client/ayon_maya/tools/mayalookassigner/usd.py @@ -0,0 +1,38 @@ +from collections import defaultdict + +try: + from pxr import Usd + is_usd_lib_supported = True +except ImportError: + is_usd_lib_supported = False + + +def get_usd_ids_cache(path): + # type: (str) -> dict + """Build an id to node mapping from a USD file. + + Nodes without IDs are ignored. + + Returns: + dict: Mapping of id to nodes in the USD file. + + """ + if not is_usd_lib_supported: + raise RuntimeError("No pxr.Usd python library available.") + + stage = Usd.Stage.Open(path) + ids = {} + for prim in stage.Traverse(): + attr = prim.GetAttribute("userProperties:cbId") + if not attr.IsValid(): + continue + value = attr.Get() + if not value: + continue + prim_path = str(prim.GetPath()) + ids[prim_path] = value + + cache = defaultdict(list) + for prim_path, value in ids.items(): + cache[value].append(prim_path) + return dict(cache)
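# Reviewer note (not part of the patch): example use of the USD id cache
# above; it requires pxr.Usd to be importable and the file path here is
# hypothetical.
id_to_prims = get_usd_ids_cache("/tmp/model.usda")
for node_id, prim_paths in sorted(id_to_prims.items()):
    print(node_id, "->", prim_paths)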
diff --git a/client/ayon_maya/tools/mayalookassigner/views.py b/client/ayon_maya/tools/mayalookassigner/views.py new file mode 100644 index 00000000..489c194f --- /dev/null +++ b/client/ayon_maya/tools/mayalookassigner/views.py @@ -0,0 +1,47 @@ +from qtpy import QtWidgets, QtCore + + +class View(QtWidgets.QTreeView): + data_changed = QtCore.Signal() + + def __init__(self, parent=None): + super(View, self).__init__(parent=parent) + + # view settings + self.setAlternatingRowColors(False) + self.setSortingEnabled(True) + self.setSelectionMode(QtWidgets.QAbstractItemView.ExtendedSelection) + self.setContextMenuPolicy(QtCore.Qt.CustomContextMenu) + + def get_indices(self): + """Get the selected rows""" + selection_model = self.selectionModel() + return selection_model.selectedRows() + + def extend_to_children(self, indices): + """Extend the indices to the children indices. + + Top-level indices are extended to their children indices. Sub-items + are kept as is. + + :param indices: The indices to extend. + :type indices: list + + :return: The children indices + :rtype: list + """ + + subitems = set() + for i in indices: + valid_parent = i.parent().isValid() + if valid_parent and i not in subitems: + subitems.add(i) + else: + # is a top level node + model = i.model() + rows = model.rowCount(parent=i) + for row in range(rows): + child = model.index(row, 0, parent=i) + subitems.add(child) + + return list(subitems) diff --git a/client/ayon_maya/tools/mayalookassigner/vray_proxies.py b/client/ayon_maya/tools/mayalookassigner/vray_proxies.py new file mode 100644 index 00000000..6b451abb --- /dev/null +++ b/client/ayon_maya/tools/mayalookassigner/vray_proxies.py @@ -0,0 +1,137 @@ +# -*- coding: utf-8 -*- +"""Tools for loading looks to vray proxies.""" +from collections import defaultdict +import logging + +from maya import cmds +import ayon_api + +from ayon_core.pipeline import get_current_project_name +import ayon_maya.api.lib as maya_lib +from . import lib +from .alembic import get_alembic_ids_cache + + +log = logging.getLogger(__name__) + + +def assign_vrayproxy_shaders(vrayproxy, assignments): + # type: (str, dict) -> None + """Assign shaders to content of Vray Proxy. + + This will create shader overrides on Vray Proxy to assign shaders to its + content. + + Todo: + Allow to optimize and assign a single shader to multiple shapes at + once or maybe even set it to the highest available path? + + Args: + vrayproxy (str): Name of Vray Proxy + assignments (dict): Mapping of shader assignments. + + Returns: + None + + """ + # Clear all current shader assignments + plug = vrayproxy + ".shaders" + num = cmds.getAttr(plug, size=True) + for i in reversed(range(num)): + cmds.removeMultiInstance("{}[{}]".format(plug, i), b=True) + + # Create new assignment overrides + index = 0 + for material, paths in assignments.items(): + for path in paths: + plug = "{}.shaders[{}]".format(vrayproxy, index) + cmds.setAttr(plug + ".shadersNames", path, type="string") + cmds.connectAttr(material + ".outColor", + plug + ".shadersConnections", force=True) + index += 1
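# Reviewer note (not part of the patch): assign_vrayproxy_shaders() above
# writes into the VRayProxy "shaders" multi-attribute; this is the per-entry
# pattern it repeats, assuming V-Ray is loaded and the node names exist.
from maya import cmds

def set_proxy_shader(vrayproxy, index, material, path):
    plug = "{}.shaders[{}]".format(vrayproxy, index)
    cmds.setAttr(plug + ".shadersNames", path, type="string")
    cmds.connectAttr(material + ".outColor",
                     plug + ".shadersConnections", force=True)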
+ + Returns: + None + + """ + path = cmds.getAttr(vrayproxy + ".fileName") + + nodes_by_id = get_alembic_ids_cache(path) + if not nodes_by_id: + log.warning("Alembic file has no cbId attributes: %s" % path) + return + + # Group by asset id so we run over the look per asset + node_ids_by_asset_id = defaultdict(set) + for node_id in nodes_by_id: + folder_id = node_id.split(":", 1)[0] + node_ids_by_asset_id[folder_id].add(node_id) + + project_name = get_current_project_name() + for folder_id, node_ids in node_ids_by_asset_id.items(): + + # Get latest look version + version_entity = ayon_api.get_last_version_by_product_name( + project_name, + product_name, + folder_id, + fields={"id"} + ) + if not version_entity: + print("Didn't find last version for product name {}".format( + product_name + )) + continue + version_id = version_entity["id"] + + relationships = lib.get_look_relationships(version_id) + shadernodes, _ = lib.load_look(version_id) + + # Get only the node ids and paths related to this asset + # And get the shader edits the look supplies + asset_nodes_by_id = { + node_id: nodes_by_id[node_id] for node_id in node_ids + } + edits = list( + maya_lib.iter_shader_edits( + relationships, shadernodes, asset_nodes_by_id + ) + ) + + # Create assignments + assignments = {} + for edit in edits: + if edit["action"] == "assign": + nodes = edit["nodes"] + shader = edit["shader"] + if not cmds.ls(shader, type="shadingEngine"): + print("Skipping non-shader: %s" % shader) + continue + + inputs = cmds.listConnections( + shader + ".surfaceShader", source=True) + if not inputs: + print("Shading engine missing material: %s" % shader) + + # Strip off component assignments + for i, node in enumerate(nodes): + if "." in node: + log.warning( + ("Converting face assignment to full object " + "assignment. This conversion can be lossy: " + "{}").format(node)) + nodes[i] = node.split(".")[0] + + material = inputs[0] + assignments[material] = nodes + + assign_vrayproxy_shaders(vrayproxy, assignments) diff --git a/client/ayon_maya/tools/mayalookassigner/widgets.py b/client/ayon_maya/tools/mayalookassigner/widgets.py new file mode 100644 index 00000000..f345b87e --- /dev/null +++ b/client/ayon_maya/tools/mayalookassigner/widgets.py @@ -0,0 +1,256 @@ +import logging +from collections import defaultdict + +from qtpy import QtWidgets, QtCore + +from ayon_core.tools.utils.models import TreeModel +from ayon_core.tools.utils.lib import ( + preserve_expanded_rows, + preserve_selection, +) + +from .models import ( + AssetModel, + LookModel +) +from . 
import commands +from .views import View + +from maya import cmds + + +class AssetOutliner(QtWidgets.QWidget): + refreshed = QtCore.Signal() + selection_changed = QtCore.Signal() + + def __init__(self, parent=None): + super(AssetOutliner, self).__init__(parent) + + title = QtWidgets.QLabel("Assets", self) + title.setAlignment(QtCore.Qt.AlignCenter) + title.setStyleSheet("font-weight: bold; font-size: 12px") + + model = AssetModel() + view = View(self) + view.setModel(model) + view.customContextMenuRequested.connect(self.right_mouse_menu) + view.setSortingEnabled(False) + view.setHeaderHidden(True) + view.setIndentation(10) + + from_all_asset_btn = QtWidgets.QPushButton( + "Get All Assets", self + ) + from_selection_btn = QtWidgets.QPushButton( + "Get Assets From Selection", self + ) + + layout = QtWidgets.QVBoxLayout(self) + layout.addWidget(title) + layout.addWidget(from_all_asset_btn) + layout.addWidget(from_selection_btn) + layout.addWidget(view) + + # Build connections + from_selection_btn.clicked.connect(self.get_selected_assets) + from_all_asset_btn.clicked.connect(self.get_all_assets) + + selection_model = view.selectionModel() + selection_model.selectionChanged.connect(self.selection_changed) + + self.view = view + self.model = model + + self.log = logging.getLogger(__name__) + + def clear(self): + self.model.clear() + + # fix looks remaining visible when no items present after "refresh" + # todo: figure out why this workaround is needed. + self.selection_changed.emit() + + def add_items(self, items): + """Add new items to the outliner""" + + self.model.add_items(items) + self.refreshed.emit() + + def get_selected_items(self): + """Get current selected items from view + + Returns: + list: list of dictionaries + """ + + selection_model = self.view.selectionModel() + return [row.data(TreeModel.ItemRole) + for row in selection_model.selectedRows(0)] + + def get_all_assets(self): + """Add all items from the current scene""" + + with preserve_expanded_rows(self.view): + with preserve_selection(self.view): + self.clear() + nodes = commands.get_all_asset_nodes() + items = commands.create_items_from_nodes(nodes) + self.add_items(items) + return len(items) > 0 + + def get_selected_assets(self): + """Add all selected items from the current scene""" + + with preserve_expanded_rows(self.view): + with preserve_selection(self.view): + self.clear() + nodes = commands.get_selected_nodes() + items = commands.create_items_from_nodes(nodes) + self.add_items(items) + + def get_nodes(self, selection=False): + """Find the nodes in the current scene per folder.""" + + items = self.get_selected_items() + + # Collect all nodes by hash (optimization) + if not selection: + nodes = cmds.ls(dag=True, long=True) + else: + nodes = commands.get_selected_nodes() + id_nodes = commands.create_folder_id_hash(nodes) + + # Collect the asset item entries per folder + # and collect the namespaces we'd like to apply + folder_items = {} + namespaces_by_folder_path = defaultdict(set) + for item in items: + folder_entity = item["folder_entity"] + folder_id = folder_entity["id"] + folder_path = folder_entity["path"] + namespaces_by_folder_path[folder_path].add(item.get("namespace")) + + if folder_path in folder_items: + continue + + folder_items[folder_path] = item + folder_items[folder_path]["nodes"] = id_nodes.get(folder_id, []) + + # Filter nodes to namespace (if only namespaces were selected) + for folder_path in folder_items: + namespaces = namespaces_by_folder_path[folder_path] + + # When None is present there should be 
no filtering + if None in namespaces: + continue + + # Else only namespaces are selected and *not* the top entry so + # we should filter to only those namespaces. + nodes = folder_items[folder_path]["nodes"] + nodes = [node for node in nodes if + commands.get_namespace_from_node(node) in namespaces] + folder_items[folder_path]["nodes"] = nodes + + return folder_items + + def select_asset_from_items(self): + """Select nodes from listed asset""" + + items = self.get_nodes(selection=False) + nodes = [] + for item in items.values(): + nodes.extend(item["nodes"]) + + commands.select(nodes) + + def right_mouse_menu(self, pos): + """Build RMB menu for asset outliner""" + + active = self.view.currentIndex() # index under mouse + active = active.sibling(active.row(), 0) # get first column + globalpos = self.view.viewport().mapToGlobal(pos) + + menu = QtWidgets.QMenu(self.view) + + # Direct assignment + apply_action = QtWidgets.QAction(menu, text="Select nodes") + apply_action.triggered.connect(self.select_asset_from_items) + + if not active.isValid(): + apply_action.setEnabled(False) + + menu.addAction(apply_action) + + menu.exec_(globalpos) + + +class LookOutliner(QtWidgets.QWidget): + menu_apply_action = QtCore.Signal() + + def __init__(self, parent=None): + super(LookOutliner, self).__init__(parent) + + # Looks from database + title = QtWidgets.QLabel("Looks", self) + title.setAlignment(QtCore.Qt.AlignCenter) + title.setStyleSheet("font-weight: bold; font-size: 12px") + title.setAlignment(QtCore.Qt.AlignCenter) + + model = LookModel() + + # Proxy for dynamic sorting + proxy = QtCore.QSortFilterProxyModel() + proxy.setSourceModel(model) + + view = View(self) + view.setModel(proxy) + view.setMinimumHeight(180) + view.setToolTip("Use right mouse button menu for direct actions") + view.customContextMenuRequested.connect(self.right_mouse_menu) + view.sortByColumn(0, QtCore.Qt.AscendingOrder) + + # look manager layout + layout = QtWidgets.QVBoxLayout(self) + layout.setContentsMargins(0, 0, 0, 0) + layout.setSpacing(10) + layout.addWidget(title) + layout.addWidget(view) + + self.view = view + self.model = model + + def clear(self): + self.model.clear() + + def add_items(self, items): + self.model.add_items(items) + + def get_selected_items(self): + """Get current selected items from view + + Returns: + list: list of dictionaries + """ + + items = [i.data(TreeModel.ItemRole) for i in self.view.get_indices()] + return [item for item in items if item is not None] + + def right_mouse_menu(self, pos): + """Build RMB menu for look view""" + + active = self.view.currentIndex() # index under mouse + active = active.sibling(active.row(), 0) # get first column + globalpos = self.view.viewport().mapToGlobal(pos) + + if not active.isValid(): + return + + menu = QtWidgets.QMenu(self.view) + + # Direct assignment + apply_action = QtWidgets.QAction(menu, text="Assign looks..") + apply_action.triggered.connect(self.menu_apply_action) + + menu.addAction(apply_action) + + menu.exec_(globalpos) diff --git a/client/ayon_maya/vendor/python/capture.py b/client/ayon_maya/vendor/python/capture.py new file mode 100644 index 00000000..4ccfdb35 --- /dev/null +++ b/client/ayon_maya/vendor/python/capture.py @@ -0,0 +1,919 @@ +"""Maya Capture + +Playblasting with independent viewport, camera and display options + +""" + +import re +import sys +import contextlib +import logging + +from maya import cmds +from maya import mel + +from qtpy import QtGui, QtWidgets + +version_info = (2, 3, 0) + +__version__ = "%s.%s.%s" % version_info 
+__license__ = "MIT" +logger = logging.getLogger("capture") + + +def capture(camera=None, + width=None, + height=None, + filename=None, + start_frame=None, + end_frame=None, + frame=None, + format='qt', + compression='H.264', + quality=100, + off_screen=False, + viewer=True, + show_ornaments=True, + sound=None, + isolate=None, + maintain_aspect_ratio=True, + overwrite=False, + frame_padding=4, + raw_frame_numbers=False, + camera_options=None, + display_options=None, + viewport_options=None, + viewport2_options=None, + complete_filename=None, + log=None): + """Playblast in an independent panel + + Arguments: + camera (str, optional): Name of camera, defaults to "persp" + width (int, optional): Width of output in pixels + height (int, optional): Height of output in pixels + filename (str, optional): Name of output file. If + none is specified, no files are saved. + start_frame (float, optional): Defaults to current start frame. + end_frame (float, optional): Defaults to current end frame. + frame (float or tuple, optional): A single frame or list of frames. + Use this to capture a single frame or an arbitrary sequence of + frames. + format (str, optional): Name of format, defaults to "qt". + compression (str, optional): Name of compression, defaults to "H.264" + quality (int, optional): The quality of the output, defaults to 100 + off_screen (bool, optional): Whether or not to playblast off screen + viewer (bool, optional): Display results in native player + show_ornaments (bool, optional): Whether or not model view ornaments + (e.g. axis icon, grid and HUD) should be displayed. + sound (str, optional): Specify the sound node to be used during + playblast. When None (default) no sound will be used. + isolate (list): List of nodes to isolate upon capturing + maintain_aspect_ratio (bool, optional): Modify height in order to + maintain aspect ratio. + overwrite (bool, optional): Whether or not to overwrite if file + already exists. If disabled and file exists and error will be + raised. + frame_padding (bool, optional): Number of zeros used to pad file name + for image sequences. + raw_frame_numbers (bool, optional): Whether or not to use the exact + frame numbers from the scene or capture to a sequence starting at + zero. Defaults to False. When set to True `viewer` can't be used + and will be forced to False. + camera_options (dict, optional): Supplied camera options, + using `CameraOptions` + display_options (dict, optional): Supplied display + options, using `DisplayOptions` + viewport_options (dict, optional): Supplied viewport + options, using `ViewportOptions` + viewport2_options (dict, optional): Supplied display + options, using `Viewport2Options` + complete_filename (str, optional): Exact name of output file. Use this + to override the output of `filename` so it excludes frame padding. + log (logger, optional): pass logger for logging messages. + + Example: + >>> # Launch default capture + >>> capture() + >>> # Launch capture with custom viewport settings + >>> capture('persp', 800, 600, + ... viewport_options={ + ... "displayAppearance": "wireframe", + ... "grid": False, + ... "polymeshes": True, + ... }, + ... camera_options={ + ... "displayResolution": True + ... } + ... 
) + + + """ + global logger + if log: + logger = log + camera = camera or "persp" + + # Ensure camera exists + if not cmds.objExists(camera): + raise RuntimeError("Camera does not exist: {0}".format(camera)) + + if width and height : + maintain_aspect_ratio = False + width = width or cmds.getAttr("defaultResolution.width") + height = height or cmds.getAttr("defaultResolution.height") + if maintain_aspect_ratio: + ratio = cmds.getAttr("defaultResolution.deviceAspectRatio") + height = round(width / ratio) + + if start_frame is None: + start_frame = cmds.playbackOptions(minTime=True, query=True) + if end_frame is None: + end_frame = cmds.playbackOptions(maxTime=True, query=True) + + # (#74) Bugfix: `maya.cmds.playblast` will raise an error when playblasting + # with `rawFrameNumbers` set to True but no explicit `frames` provided. + # Since we always know what frames will be included we can provide it + # explicitly + if raw_frame_numbers and frame is None: + frame = range(int(start_frame), int(end_frame) + 1) + + # We need to wrap `completeFilename`, otherwise even when None is provided + # it will use filename as the exact name. Only when lacking as argument + # does it function correctly. + playblast_kwargs = dict() + if complete_filename: + playblast_kwargs['completeFilename'] = complete_filename + if frame is not None: + playblast_kwargs['frame'] = frame + if sound is not None: + playblast_kwargs['sound'] = sound + + # We need to raise an error when the user gives a custom frame range with + # negative frames in combination with raw frame numbers. This will result + # in a minimal integer frame number : filename.-2147483648.png for any + # negative rendered frame + if frame and raw_frame_numbers: + check = frame if isinstance(frame, (list, tuple)) else [frame] + if any(f < 0 for f in check): + raise RuntimeError("Negative frames are not supported with " + "raw frame numbers and explicit frame numbers") + + # (#21) Bugfix: `maya.cmds.playblast` suffers from undo bug where it + # always sets the currentTime to frame 1. By setting currentTime before + # the playblast call it'll undo correctly. 
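+    # Note: the query-then-set below looks like a no-op, but it records an
+    # undoable time edit, so that undo after the playblast restores the
+    # user's original frame.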
+    cmds.currentTime(cmds.currentTime(query=True))
+
+    padding = 10  # Extend panel to accommodate the OS window manager
+
+    with _independent_panel(width=width + padding,
+                            height=height + padding,
+                            off_screen=off_screen) as panel:
+        cmds.setFocus(panel)
+
+        all_playblast_kwargs = {
+            "compression": compression,
+            "format": format,
+            "percent": 100,
+            "quality": quality,
+            "viewer": viewer,
+            "startTime": start_frame,
+            "endTime": end_frame,
+            "offScreen": off_screen,
+            "showOrnaments": show_ornaments,
+            "forceOverwrite": overwrite,
+            "filename": filename,
+            "widthHeight": [width, height],
+            "rawFrameNumbers": raw_frame_numbers,
+            "framePadding": frame_padding
+        }
+        all_playblast_kwargs.update(playblast_kwargs)
+
+        if getattr(contextlib, "nested", None):
+            with contextlib.nested(
+                _disabled_inview_messages(),
+                _maintain_camera(panel, camera),
+                _applied_viewport_options(viewport_options, panel),
+                _applied_camera_options(camera_options, panel),
+                _applied_display_options(display_options),
+                _applied_viewport2_options(viewport2_options),
+                _isolated_nodes(isolate, panel),
+                _maintained_time()
+            ):
+                output = cmds.playblast(**all_playblast_kwargs)
+        else:
+            with contextlib.ExitStack() as stack:
+                stack.enter_context(_disabled_inview_messages())
+                stack.enter_context(_maintain_camera(panel, camera))
+                stack.enter_context(
+                    _applied_viewport_options(viewport_options, panel)
+                )
+                stack.enter_context(
+                    _applied_camera_options(camera_options, panel)
+                )
+                stack.enter_context(
+                    _applied_display_options(display_options)
+                )
+                stack.enter_context(
+                    _applied_viewport2_options(viewport2_options)
+                )
+                stack.enter_context(_isolated_nodes(isolate, panel))
+                stack.enter_context(_maintained_time())
+
+                output = cmds.playblast(**all_playblast_kwargs)
+
+        return output
+
+
+def snap(*args, **kwargs):
+    """Single frame playblast in an independent panel.
+
+    The arguments of `capture` are all valid here as well, except for
+    `start_frame` and `end_frame`.
+
+    Arguments:
+        frame (float, optional): The frame to snap. If not provided current
+            frame is used.
+        clipboard (bool, optional): Whether to add the output image to the
+            global clipboard. This allows to easily paste the snapped image
+            into another application, e.g. into Photoshop.
+
+    Keywords:
+        See `capture`.
+
+    """
+
+    # capture single frame
+    frame = kwargs.pop('frame', cmds.currentTime(q=1))
+    kwargs['start_frame'] = frame
+    kwargs['end_frame'] = frame
+    kwargs['frame'] = frame
+
+    if not isinstance(frame, (int, float)):
+        raise TypeError("frame must be a single frame (integer or float). "
+                        "Use `capture()` for sequences.")
+
+    # override capture defaults
+    format = kwargs.pop('format', "image")
+    compression = kwargs.pop('compression', "png")
+    viewer = kwargs.pop('viewer', False)
+    raw_frame_numbers = kwargs.pop('raw_frame_numbers', True)
+    kwargs['compression'] = compression
+    kwargs['format'] = format
+    kwargs['viewer'] = viewer
+    kwargs['raw_frame_numbers'] = raw_frame_numbers
+
+    # pop snap only keyword arguments
+    clipboard = kwargs.pop('clipboard', False)
+
+    # perform capture
+    output = capture(*args, **kwargs)
+
+    def replace(m):
+        """Substitute # with frame number"""
+        return str(int(frame)).zfill(len(m.group()))
+
+    output = re.sub("#+", replace, output)
+
+    # add image to clipboard
+    if clipboard:
+        _image_to_clipboard(output)
+
+    return output
+
+
+CameraOptions = {
+    "displayGateMask": False,
+    "displayResolution": False,
+    "displayFilmGate": False,
+    "displayFieldChart": False,
+    "displaySafeAction": False,
+    "displaySafeTitle": False,
+    "displayFilmPivot": False,
+    "displayFilmOrigin": False,
+    "overscan": 1.0,
+    "depthOfField": False,
+}
+
+DisplayOptions = {
+    "displayGradient": True,
+    "background": (0.631, 0.631, 0.631),
+    "backgroundTop": (0.535, 0.617, 0.702),
+    "backgroundBottom": (0.052, 0.052, 0.052),
+}
+
+# These display options require a different command to be queried and set
+_DisplayOptionsRGB = set(["background", "backgroundTop", "backgroundBottom"])
+
+ViewportOptions = {
+    # renderer
+    "rendererName": "vp2Renderer",
+    "fogging": False,
+    "fogMode": "linear",
+    "fogDensity": 1,
+    "fogStart": 1,
+    "fogEnd": 1,
+    "fogColor": (0, 0, 0, 0),
+    "shadows": False,
+    "displayTextures": True,
+    "displayLights": "default",
+    "useDefaultMaterial": False,
+    "wireframeOnShaded": False,
+    "displayAppearance": 'smoothShaded',
+    "selectionHiliteDisplay": False,
+    "headsUpDisplay": True,
+    # object display
+    "imagePlane": True,
+    "nurbsCurves": False,
+    "nurbsSurfaces": False,
+    "polymeshes": True,
+    "subdivSurfaces": False,
+    "planes": True,
+    "cameras": False,
+    "controlVertices": True,
+    "lights": False,
+    "grid": False,
+    "hulls": True,
+    "joints": False,
+    "ikHandles": False,
+    "deformers": False,
+    "dynamics": False,
+    "fluids": False,
+    "hairSystems": False,
+    "follicles": False,
+    "nCloths": False,
+    "nParticles": False,
+    "nRigids": False,
+    "dynamicConstraints": False,
+    "locators": False,
+    "manipulators": False,
+    "dimensions": False,
+    "handles": False,
+    "pivots": False,
+    "textures": False,
+    "strokes": False
+}
+
+Viewport2Options = {
+    "consolidateWorld": True,
+    "enableTextureMaxRes": False,
+    "bumpBakeResolution": 64,
+    "colorBakeResolution": 64,
+    "floatingPointRTEnable": True,
+    "floatingPointRTFormat": 1,
+    "gammaCorrectionEnable": False,
+    "gammaValue": 2.2,
+    "lineAAEnable": False,
+    "maxHardwareLights": 8,
+    "motionBlurEnable": False,
+    "motionBlurSampleCount": 8,
+    "motionBlurShutterOpenFraction": 0.2,
+    "motionBlurType": 0,
+    "multiSampleCount": 8,
+    "multiSampleEnable": False,
+    "singleSidedLighting": False,
+    "ssaoEnable": False,
+    "ssaoAmount": 1.0,
+    "ssaoFilterRadius": 16,
+    "ssaoRadius": 16,
+    "ssaoSamples": 16,
+    "textureMaxResolution": 4096,
+    "threadDGEvaluation": False,
+    "transparencyAlgorithm": 1,
+    "transparencyQuality": 0.33,
+    "useMaximumHardwareLights": True,
+    "vertexAnimationCache": 0,
+    "renderDepthOfField": 0
+}
+
+
+def apply_view(panel, **options):
+    """Apply options to panel"""
+
+    camera = cmds.modelPanel(panel, camera=True, query=True)
+
+    # Display options
+    display_options = options.get("display_options", {})
+    _iteritems = getattr(display_options, "iteritems", display_options.items)
+    for key, value in _iteritems():
+        if key in _DisplayOptionsRGB:
+            cmds.displayRGBColor(key, *value)
+        else:
+            cmds.displayPref(**{key: value})
+
+    # Camera options
+    camera_options = options.get("camera_options", {})
+    _iteritems = getattr(camera_options, "iteritems", camera_options.items)
+    for key, value in _iteritems():
+        _safe_setAttr("{0}.{1}".format(camera, key), value)
+
+    # Viewport options
+    viewport_options = options.get("viewport_options", {})
+    _iteritems = getattr(
+        viewport_options, "iteritems", viewport_options.items
+    )
+    for key, value in _iteritems():
+        cmds.modelEditor(panel, edit=True, **{key: value})
+
+    viewport2_options = options.get("viewport2_options", {})
+    _iteritems = getattr(
+        viewport2_options, "iteritems", viewport2_options.items
+    )
+    for key, value in _iteritems():
+        attr = "hardwareRenderingGlobals.{0}".format(key)
+        _safe_setAttr(attr, value)
+
+
+def parse_active_panel():
+    """Parse the active modelPanel.
+
+    Raises:
+        RuntimeError: When no active modelPanel is found.
+
+    Returns:
+        str: Name of modelPanel
+
+    """
+
+    panel = cmds.getPanel(withFocus=True)
+
+    # This happens when last focus was on a panel
+    # that got deleted (e.g. `capture()` then `parse_active_view()`)
+    if not panel or "modelPanel" not in panel:
+        raise RuntimeError("No active model panel found")
+
+    return panel
+
+
+def parse_active_view():
+    """Parse the current settings from the active view"""
+    panel = parse_active_panel()
+    return parse_view(panel)
+
+
+def parse_view(panel):
+    """Parse the scene, panel and camera for their current settings
+
+    Example:
+        >>> parse_view("modelPanel1")
+
+    Arguments:
+        panel (str): Name of modelPanel
+
+    """
+
+    camera = cmds.modelPanel(panel, query=True, camera=True)
+
+    # Display options
+    display_options = {}
+    for key in DisplayOptions:
+        if key in _DisplayOptionsRGB:
+            display_options[key] = cmds.displayRGBColor(key, query=True)
+        else:
+            display_options[key] = cmds.displayPref(query=True, **{key: True})
+
+    # Camera options
+    camera_options = {}
+    for key in CameraOptions:
+        camera_options[key] = cmds.getAttr("{0}.{1}".format(camera, key))
+
+    # Viewport options
+    viewport_options = {}
+
+    # Capture plugin display filters first to ensure we never override
+    # built-in arguments if a plugin ever has similarly named
+    # plugin display filters (which it shouldn't!)
+    plugins = cmds.pluginDisplayFilter(query=True, listFilters=True)
+    for plugin in plugins:
+        plugin = str(plugin)  # unicode -> str for simplicity of the dict
+        state = cmds.modelEditor(panel, query=True, queryPluginObjects=plugin)
+        viewport_options[plugin] = state
+
+    for key in ViewportOptions:
+        viewport_options[key] = cmds.modelEditor(
+            panel, query=True, **{key: True})
+
+    viewport2_options = {}
+    for key in Viewport2Options.keys():
+        attr = "hardwareRenderingGlobals.{0}".format(key)
+        try:
+            viewport2_options[key] = cmds.getAttr(attr)
+        except ValueError:
+            continue
+
+    return {
+        "camera": camera,
+        "display_options": display_options,
+        "camera_options": camera_options,
+        "viewport_options": viewport_options,
+        "viewport2_options": viewport2_options
+    }
+
+
+def parse_active_scene():
+    """Parse active scene for arguments for capture()
+
+    *Resolution taken from render settings.
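+
+    Example:
+        >>> # A sketch: every key returned here is also a valid `capture()`
+        >>> # argument, so the parsed options can be forwarded directly.
+        >>> options = parse_active_scene()
+        >>> capture(**options)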
+
+    """
+
+    time_control = mel.eval("$gPlayBackSlider = $gPlayBackSlider")
+
+    return {
+        "start_frame": cmds.playbackOptions(minTime=True, query=True),
+        "end_frame": cmds.playbackOptions(maxTime=True, query=True),
+        "width": cmds.getAttr("defaultResolution.width"),
+        "height": cmds.getAttr("defaultResolution.height"),
+        "compression": cmds.optionVar(query="playblastCompression"),
+        "filename": (cmds.optionVar(query="playblastFile")
+                     if cmds.optionVar(query="playblastSaveToFile") else None),
+        "format": cmds.optionVar(query="playblastFormat"),
+        "off_screen": (True if cmds.optionVar(query="playblastOffscreen")
+                       else False),
+        "show_ornaments": (True if cmds.optionVar(
+            query="playblastShowOrnaments") else False),
+        "quality": cmds.optionVar(query="playblastQuality"),
+        "sound": cmds.timeControl(time_control, q=True, sound=True) or None
+    }
+
+
+def apply_scene(**options):
+    """Apply options to the scene
+
+    Example:
+        >>> apply_scene(start_frame=1009)
+
+    Arguments:
+        options (dict): Scene options
+
+    """
+
+    if "start_frame" in options:
+        cmds.playbackOptions(minTime=options["start_frame"])
+
+    if "end_frame" in options:
+        cmds.playbackOptions(maxTime=options["end_frame"])
+
+    if "width" in options:
+        _safe_setAttr("defaultResolution.width", options["width"])
+
+    if "height" in options:
+        _safe_setAttr("defaultResolution.height", options["height"])
+
+    if "compression" in options:
+        cmds.optionVar(
+            stringValue=["playblastCompression", options["compression"]])
+
+    if "filename" in options:
+        cmds.optionVar(
+            stringValue=["playblastFile", options["filename"]])
+
+    if "format" in options:
+        cmds.optionVar(
+            stringValue=["playblastFormat", options["format"]])
+
+    if "off_screen" in options:
+        cmds.optionVar(
+            intValue=["playblastOffscreen", options["off_screen"]])
+
+    if "show_ornaments" in options:
+        cmds.optionVar(
+            intValue=["playblastShowOrnaments", options["show_ornaments"]])
+
+    if "quality" in options:
+        cmds.optionVar(
+            floatValue=["playblastQuality", options["quality"]])
+
+
+@contextlib.contextmanager
+def _applied_view(panel, **options):
+    """Apply options to panel"""
+
+    original = parse_view(panel)
+    apply_view(panel, **options)
+
+    try:
+        yield
+    finally:
+        apply_view(panel, **original)
+
+
+@contextlib.contextmanager
+def _independent_panel(width, height, off_screen=False):
+    """Create capture-window context without decorations
+
+    Arguments:
+        width (int): Width of panel
+        height (int): Height of panel
+
+    Example:
+        >>> with _independent_panel(800, 600):
+        ...     cmds.capture()
+
+    """
+
+    # center panel on screen
+    screen_width, screen_height = _get_screen_size()
+    topLeft = [int((screen_height - height) / 2.0),
+               int((screen_width - width) / 2.0)]
+
+    window = cmds.window(width=width,
+                         height=height,
+                         topLeftCorner=topLeft,
+                         menuBarVisible=False,
+                         titleBar=False,
+                         visible=not off_screen)
+    cmds.paneLayout()
+    panel = cmds.modelPanel(menuBarVisible=False,
+                            label='CapturePanel')
+
+    # Hide icons under panel menus
+    bar_layout = cmds.modelPanel(panel, q=True, barLayout=True)
+    cmds.frameLayout(bar_layout, edit=True, collapse=True)
+
+    if not off_screen:
+        cmds.showWindow(window)
+
+    # Set the modelEditor of the modelPanel as the active view so it takes
+    # the playback focus. Does seem redundant with the `refresh` added in.
+    editor = cmds.modelPanel(panel, query=True, modelEditor=True)
+    cmds.modelEditor(editor, edit=True, activeView=True)
+
+    # Force a draw refresh of Maya so it keeps focus on the new panel.
+    # This focus is required to force preview playback in the independent
+    # panel.
+    cmds.refresh(force=True)
+
+    try:
+        yield panel
+    finally:
+        # Delete the panel to fix memory leak (about 5 mb per capture)
+        cmds.deleteUI(panel, panel=True)
+        cmds.deleteUI(window)
+
+
+@contextlib.contextmanager
+def _applied_camera_options(options, panel):
+    """Context manager for applying `options` to `camera`"""
+
+    camera = cmds.modelPanel(panel, query=True, camera=True)
+    options = dict(CameraOptions, **(options or {}))
+
+    old_options = dict()
+    for opt in options.copy():
+        try:
+            old_options[opt] = cmds.getAttr(camera + "." + opt)
+        except Exception:
+            sys.stderr.write("Could not get camera attribute "
+                             "for capture: %s\n" % opt)
+            options.pop(opt)
+
+    _iteritems = getattr(options, "iteritems", options.items)
+    for opt, value in _iteritems():
+        # Skip locked attributes instead of failing on them
+        if cmds.getAttr(camera + "." + opt, lock=True):
+            continue
+        _safe_setAttr(camera + "." + opt, value)
+
+    try:
+        yield
+    finally:
+        if old_options:
+            _iteritems = getattr(old_options, "iteritems", old_options.items)
+            for opt, value in _iteritems():
+                if cmds.getAttr(camera + "." + opt, lock=True):
+                    continue
+                _safe_setAttr(camera + "." + opt, value)
+
+
+@contextlib.contextmanager
+def _applied_display_options(options):
+    """Context manager for setting background color display options."""
+
+    options = dict(DisplayOptions, **(options or {}))
+
+    colors = ['background', 'backgroundTop', 'backgroundBottom']
+    preferences = ['displayGradient']
+
+    # Store current settings
+    original = {}
+    for color in colors:
+        original[color] = cmds.displayRGBColor(color, query=True) or []
+
+    for preference in preferences:
+        original[preference] = cmds.displayPref(
+            query=True, **{preference: True})
+
+    # Apply settings
+    for color in colors:
+        value = options[color]
+        cmds.displayRGBColor(color, *value)
+
+    for preference in preferences:
+        value = options[preference]
+        cmds.displayPref(**{preference: value})
+
+    try:
+        yield
+
+    finally:
+        # Restore original settings
+        for color in colors:
+            cmds.displayRGBColor(color, *original[color])
+        for preference in preferences:
+            cmds.displayPref(**{preference: original[preference]})
+
+
+@contextlib.contextmanager
+def _applied_viewport_options(options, panel):
+    """Context manager for applying `options` to `panel`"""
+
+    options = dict(ViewportOptions, **(options or {}))
+    plugin_options = options.pop("pluginObjects", {})
+
+    # BUGFIX Maya 2020: some keys in the viewport options dict may not be
+    # unicode. This is a local OpenPype edit to capture.py for issue #4730.
+    # TODO: Remove when dropping Maya 2020 compatibility.
+    if int(cmds.about(version=True)) <= 2020:
+        options = {
+            str(key): value for key, value in options.items()
+        }
+        plugin_options = {
+            str(key): value for key, value in plugin_options.items()
+        }
+
+    # Backwards compatibility for `pluginObjects` flattened into `options`:
+    # separate the plugin display filter options since they need to
+    # be set differently (see #55)
+    plugins = set(cmds.pluginDisplayFilter(query=True, listFilters=True))
+    for plugin in plugins:
+        if plugin in options:
+            plugin_options[plugin] = options.pop(plugin)
+
+    # default options
+    try:
+        cmds.modelEditor(panel, edit=True, **options)
+    except TypeError:
+        # Try to set as much of the state as possible by setting the options
+        # one by one. This way we can also report the failing key values
+        # explicitly.
+        for key, value in options.items():
+            try:
+                cmds.modelEditor(panel, edit=True, **{key: value})
+            except TypeError:
+                logger.error("Failing to apply option '{}': {}".format(
+                    key, value))
+
+    # plugin display filter options
+    for plugin, state in plugin_options.items():
+        cmds.modelEditor(panel, edit=True, pluginObjects=(plugin, state))
+
+    yield
+
+
+@contextlib.contextmanager
+def _applied_viewport2_options(options):
+    """Context manager for setting viewport 2.0 options.
+
+    These options are applied by setting attributes on the
+    "hardwareRenderingGlobals" node.
+
+    """
+
+    options = dict(Viewport2Options, **(options or {}))
+
+    # Store current settings
+    original = {}
+    for opt in options.copy():
+        try:
+            original[opt] = cmds.getAttr("hardwareRenderingGlobals." + opt)
+        except ValueError:
+            options.pop(opt)
+
+    # Apply settings
+    _iteritems = getattr(options, "iteritems", options.items)
+    for opt, value in _iteritems():
+        _safe_setAttr("hardwareRenderingGlobals." + opt, value)
+
+    try:
+        yield
+    finally:
+        # Restore previous settings
+        _iteritems = getattr(original, "iteritems", original.items)
+        for opt, value in _iteritems():
+            _safe_setAttr("hardwareRenderingGlobals." + opt, value)
+
+
+@contextlib.contextmanager
+def _isolated_nodes(nodes, panel):
+    """Context manager for isolating `nodes` in `panel`"""
+
+    if nodes is not None:
+        cmds.isolateSelect(panel, state=True)
+        for obj in nodes:
+            cmds.isolateSelect(panel, addDagObject=obj)
+    yield
+
+
+@contextlib.contextmanager
+def _maintained_time():
+    """Context manager for preserving (resetting) the time after the context"""
+
+    current_time = cmds.currentTime(query=1)
+    try:
+        yield
+    finally:
+        cmds.currentTime(current_time)
+
+
+@contextlib.contextmanager
+def _maintain_camera(panel, camera):
+    state = {}
+
+    if not _in_standalone():
+        cmds.lookThru(panel, camera)
+    else:
+        state = dict((camera, cmds.getAttr(camera + ".rnd"))
+                     for camera in cmds.ls(type="camera"))
+        _safe_setAttr(camera + ".rnd", True)
+
+    try:
+        yield
+    finally:
+        _iteritems = getattr(state, "iteritems", state.items)
+        for camera, renderable in _iteritems():
+            _safe_setAttr(camera + ".rnd", renderable)
+
+
+@contextlib.contextmanager
+def _disabled_inview_messages():
+    """Disable in-view help messages during the context"""
+    original = cmds.optionVar(q="inViewMessageEnable")
+    cmds.optionVar(iv=("inViewMessageEnable", 0))
+    try:
+        yield
+    finally:
+        cmds.optionVar(iv=("inViewMessageEnable", original))
+
+
+def _image_to_clipboard(path):
+    """Copies the image at path to the system's global clipboard."""
+    if _in_standalone():
+        raise Exception("Cannot copy to clipboard from Maya Standalone")
+
+    image = QtGui.QImage(path)
+    clipboard = QtWidgets.QApplication.clipboard()
+    clipboard.setImage(image, mode=QtGui.QClipboard.Clipboard)
+
+
+def _get_screen_size():
+    """Return available screen size without space occupied by taskbar"""
+    if _in_standalone():
+        return [0, 0]
+
+    try:
+        rect = QtWidgets.QDesktopWidget().screenGeometry(-1)
+    except AttributeError:
+        # Qt6 removed QDesktopWidget; use the primary screen instead
+        rect = QtWidgets.QApplication.primaryScreen().availableGeometry()
+    return [rect.width(), rect.height()]
+
+
+def _in_standalone():
+    return not hasattr(cmds, "about") or cmds.about(batch=True)
+
+
+def _safe_setAttr(*args, **kwargs):
+    """Wrapper to handle failures when an attribute is locked.
+
+    Temporary hotfix until a better approach (store value, unlock, set new,
+    return old, lock again) is implemented.
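+
+    Example:
+        >>> # A sketch, assuming a camera attribute; if "persp.overscan" is
+        >>> # locked, the wrapper prints a warning instead of raising.
+        >>> _safe_setAttr("persp.overscan", 1.0)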
+    """
+    try:
+        cmds.setAttr(*args, **kwargs)
+    except RuntimeError:
+        print("Cannot setAttr {}!".format(args))
+
+
+# --------------------------------
+#
+# Apply version specific settings
+#
+# --------------------------------
+
+version = mel.eval("getApplicationVersionAsFloat")
+if version > 2015:
+    Viewport2Options.update({
+        "hwFogAlpha": 1.0,
+        "hwFogFalloff": 0,
+        "hwFogDensity": 0.1,
+        "hwFogEnable": False,
+        "holdOutDetailMode": 1,
+        "hwFogEnd": 100.0,
+        "holdOutMode": True,
+        "hwFogColorR": 0.5,
+        "hwFogColorG": 0.5,
+        "hwFogColorB": 0.5,
+        "hwFogStart": 0.0,
+    })
+    ViewportOptions.update({
+        "motionTrails": False
+    })
diff --git a/package.py b/package.py
index 4537c23e..274f7486 100644
--- a/package.py
+++ b/package.py
@@ -1,3 +1,7 @@
 name = "maya"
 title = "Maya"
-version = "0.1.20"
+version = "0.2.0"
+
+ayon_required_addons = {
+    "core": ">0.3.2",
+}