Skip to content

Commit

Permalink
Merge pull request #164 from BigRoy/bugfix/cache_farm_publish_order
Browse files Browse the repository at this point in the history
Bugfix: Farm cache publish supports newer ayon-deadline versions
  • Loading branch information
BigRoy authored Nov 13, 2024
2 parents 0f94942 + c0b04b8 commit 45b7806
Showing 1 changed file with 23 additions and 16 deletions.
39 changes: 23 additions & 16 deletions client/ayon_houdini/plugins/publish/collect_cache_farm.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,11 +7,9 @@
)


class CollectDataforCache(plugin.HoudiniInstancePlugin):
"""Collect data for caching to Deadline."""

# Run after Collect Frames
order = pyblish.api.CollectorOrder + 0.11
class CollectFarmCacheFamily(plugin.HoudiniInstancePlugin):
"""Collect publish.hou family for caching on farm as early as possible."""
order = pyblish.api.CollectorOrder - 0.45
families = ["ass", "pointcache", "redshiftproxy", "vdbcache", "model"]
targets = ["local", "remote"]
label = "Collect Data for Cache"
Expand All @@ -24,6 +22,19 @@ def process(self, instance):
self.log.debug("Caching on farm is disabled. "
"Skipping farm collecting.")
return
instance.data["families"].append("publish.hou")


class CollectDataforCache(plugin.HoudiniInstancePlugin):
"""Collect data for caching to Deadline."""

# Run after Collect Frames
order = pyblish.api.CollectorOrder + 0.11
families = ["publish.hou"]
targets = ["local", "remote"]
label = "Collect Data for Cache"

def process(self, instance):
# Why do we need this particular collector to collect the expected
# output files from a ROP node. Don't we have a dedicated collector
# for that yet?
Expand All @@ -35,27 +46,23 @@ def process(self, instance):
ropnode = hou.node(instance.data["instance_node"])
output_parm = lib.get_output_parameter(ropnode)
expected_filepath = output_parm.eval()
instance.data.setdefault("files", list())
instance.data.setdefault("expectedFiles", list())

files = instance.data.setdefault("files", list())
frames = instance.data.get("frames", "")
if isinstance(frames, str):
# single file
instance.data["files"].append(expected_filepath)
files.append(expected_filepath)
else:
# list of files
staging_dir, _ = os.path.split(expected_filepath)
instance.data["files"].extend(
["{}/{}".format(staging_dir, f) for f in frames]
)
files.extend("{}/{}".format(staging_dir, f) for f in frames)

cache_files = {"cache": instance.data["files"]}
expected_files = instance.data.setdefault("expectedFiles", list())
expected_files.append({"cache": files})
self.log.debug(f"Caching on farm expected files: {expected_files}")

instance.data.update({
# used in HoudiniCacheSubmitDeadline in ayon-deadline
"plugin": "Houdini",
"publish": True
})
instance.data["families"].append("publish.hou")
instance.data["expectedFiles"].append(cache_files)

self.log.debug("Caching on farm expected files: {}".format(instance.data["expectedFiles"]))

0 comments on commit 45b7806

Please sign in to comment.