4 changes: 4 additions & 0 deletions client/ayon_maya/api/lib.py
@@ -1811,6 +1811,10 @@ def get_container_members(container):
# Assume it's a container dictionary
container = container["objectName"]

if cmds.nodeType(container) != "objectSet":
# Assume the member is the imprinted node itself
return [container]

members = cmds.sets(container, query=True) or []
members = cmds.ls(members, long=True, objectsOnly=True) or []
all_members = set(members)
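A minimal usage sketch of the early-return added above; the node names ("modelMain_01_CON", "|cache_GRP|cacheShape") are hypothetical:

from ayon_maya.api.lib import get_container_members

# An objectSet container still expands to its members, while an imprinted
# non-set node (e.g. a gpuCache shape) now returns itself.
set_members = get_container_members("modelMain_01_CON")
cache_members = get_container_members("|cache_GRP|cacheShape")
assert cache_members == ["|cache_GRP|cacheShape"]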
77 changes: 42 additions & 35 deletions client/ayon_maya/api/pipeline.py
@@ -7,7 +7,6 @@
import shutil

from maya import utils, cmds, OpenMaya
import maya.api.OpenMaya as om

import pyblish.api

@@ -373,6 +372,12 @@ def parse_container(container):
"""
data = lib.read(container)

# Allow `AYON_` prefixed containers, strip the prefix
if "id" not in data and "AYON_id" in data:
for key in list(data.keys()):
if key.startswith("AYON_"):
data[key[5:]] = data.pop(key)

# Backwards compatibility pre-schemas for containers
data["schema"] = data.get("schema", "openpype:container-1.0")
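A small worked sketch of the prefix stripping above; the dictionary values are illustrative only:

# Illustrative only: data as read from an AYON_-prefixed (imprinted) node
data = {"AYON_id": "<container id>", "AYON_name": "modelMain"}
if "id" not in data and "AYON_id" in data:
    for key in list(data.keys()):
        if key.startswith("AYON_"):
            data[key[5:]] = data.pop(key)
# data is now {"id": "<container id>", "name": "modelMain"}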

@@ -392,36 +397,26 @@ def _ls():
str: AYON container node name (objectSet or imprinted node)

"""

def _maya_iterate(iterator):
"""Helper to iterate a maya iterator"""
while not iterator.isDone():
yield iterator.thisNode()
iterator.next()

ids = {
AYON_CONTAINER_ID,
# Backwards compatibility
AVALON_CONTAINER_ID
}

# Iterate over all 'set' nodes in the scene to detect whether
# they have the ayon container ".id" attribute.
fn_dep = om.MFnDependencyNode()
iterator = om.MItDependencyNodes(om.MFn.kSet)
for mobject in _maya_iterate(iterator):
if mobject.apiTypeStr != "kSet":
# Only match by exact type
continue

fn_dep.setObject(mobject)
if not fn_dep.hasAttribute("id"):
# Find all nodes by attribute existence
for node_attr in cmds.ls(
["*.id", "*.AYON_id"],
recursive=True,
long=True
):
try:
value = cmds.getAttr(node_attr)
except RuntimeError:
# Not a gettable attribute
continue

plug = fn_dep.findPlug("id", True)
value = plug.asString()
if value in ids:
yield fn_dep.name()
yield node_attr.split(".", 1)[0]


def ls():
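For reference, a hedged sketch of the wildcard lookup that _ls() now relies on; the printed names are hypothetical, and recursive=True also matches nodes inside namespaces:

from maya import cmds

# Each hit is "<node>.<attribute>", so splitting on the first "." gives
# the node name, mirroring the loop in _ls().
for node_attr in cmds.ls(["*.id", "*.AYON_id"], recursive=True, long=True):
    print(node_attr.split(".", 1)[0])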
@@ -440,6 +435,30 @@ def ls():
yield parse_container(container)


def imprint_container(container,
name,
namespace,
context,
loader=None,
prefix=None):
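"""Imprint the loaded container data as string attributes on `container`.

If `prefix` is given (e.g. "AYON_"), every attribute name gets that
prefix; `parse_container` strips it again when reading the data back.
"""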
data = [
("schema", "ayon:container-3.0"),
("id", AVALON_CONTAINER_ID),
("name", name),
("namespace", namespace),
("loader", loader),
("representation", context["representation"]["id"]),
("project_name", context["project"]["name"])
]
for key, value in data:

if prefix is not None:
key = prefix + key

cmds.addAttr(container, longName=key, dataType="string")
cmds.setAttr(container + "." + key, str(value), type="string")


@lib.undo_chunk()
def containerise(name,
namespace,
@@ -465,19 +484,7 @@ def containerise(name,

"""
container = cmds.sets(nodes, name="%s_%s_%s" % (namespace, name, suffix))

data = [
("schema", "openpype:container-2.0"),
("id", AVALON_CONTAINER_ID),
("name", name),
("namespace", namespace),
("loader", loader),
("representation", context["representation"]["id"]),
("project_name", context["project"]["name"])
]
for key, value in data:
cmds.addAttr(container, longName=key, dataType="string")
cmds.setAttr(container + "." + key, str(value), type="string")
imprint_container(container, name, namespace, context, loader)

main_container = cmds.ls(AVALON_CONTAINERS, type="objectSet")
if not main_container:
101 changes: 62 additions & 39 deletions client/ayon_maya/plugins/load/load_gpucache.py
@@ -1,8 +1,6 @@
import maya.cmds as cmds
from ayon_core.pipeline import get_representation_path
from ayon_core.settings import get_project_settings
from ayon_maya.api.lib import unique_namespace
from ayon_maya.api.pipeline import containerise
from ayon_maya.api.pipeline import imprint_container
from ayon_maya.api import plugin
from ayon_maya.api.plugin import get_load_color_for_product_type

@@ -20,75 +18,100 @@ class GpuCacheLoader(plugin.Loader):

def load(self, context, name, namespace, data):
folder_name = context["folder"]["name"]
namespace = namespace or unique_namespace(
folder_name + "_",
prefix="_" if folder_name[0].isdigit() else "",
suffix="_",
)

cmds.loadPlugin("gpuCache", quiet=True)

# Root group
label = "{}:{}".format(namespace, name)
root = cmds.group(name=label, empty=True)
# Create GPU cache
label = "{}_{}".format(folder_name, name)
transform_name = label + "_#"
transform = cmds.createNode("transform", name=transform_name)
cache = cmds.createNode("gpuCache",
parent=transform,
name="{0}Shape".format(transform_name))

# Colorize root transform
project_name = context["project"]["name"]
settings = get_project_settings(project_name)
color = get_load_color_for_product_type("model", settings)
if color is not None:
red, green, blue = color
cmds.setAttr(root + ".useOutlinerColor", 1)
cmds.setAttr(transform + ".useOutlinerColor", 1)
cmds.setAttr(
root + ".outlinerColor", red, green, blue
transform + ".outlinerColor", red, green, blue
)

# Create transform with shape
transform_name = label + "_GPU"
transform = cmds.createNode("transform", name=transform_name,
parent=root)
cache = cmds.createNode("gpuCache",
parent=transform,
name="{0}Shape".format(transform_name))

# Set the cache filepath
path = self.filepath_from_context(context)
cmds.setAttr(cache + '.cacheFileName', path, type="string")
cmds.setAttr(cache + '.cacheGeomPath', "|", type="string") # root

# Lock parenting of the transform and cache
cmds.lockNode([transform, cache], lock=True)

nodes = [root, transform, cache]
self[:] = nodes

return containerise(
imprint_container(
cache,
name=name,
namespace=namespace,
nodes=nodes,
context=context,
loader=self.__class__.__name__)
loader=self.__class__.__name__,
prefix="AYON_")

return cache
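For reference, a hedged sketch of reading the imprinted data back from a loaded cache; the node name "cache_model_GPU1Shape" is hypothetical:

from maya import cmds

# After load() the container data lives on the gpuCache shape itself
# as AYON_-prefixed string attributes.
cache = "cache_model_GPU1Shape"
print(cmds.getAttr(cache + ".AYON_representation"))  # representation id
print(cmds.getAttr(cache + ".AYON_loader"))          # "GpuCacheLoader"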

def update(self, container, context):
repre_entity = context["representation"]
path = get_representation_path(repre_entity)
if self._is_legacy_container(container):
return self._legacy_update(container, context)

cache = container["objectName"]

# Update the cache
path = self.filepath_from_context(context)
cmds.setAttr(cache + ".cacheFileName", path, type="string")

# Update representation id
cmds.setAttr(cache + ".AYON_representation",
context["representation"]["id"],
type="string")

def switch(self, container, context):
self.update(container, context)

def remove(self, container):
if self._is_legacy_container(container):
return self._legacy_remove(container)

# Remove shape and parent transforms
# If the shape was instanced, remove each transform
cache = container['objectName']
paths = cmds.ls(cache, allPaths=True, long=True)
transforms = cmds.listRelatives(paths, parent=True, fullPath=True) or []

members = transforms + [cache]
# The nodes were locked on load; unlock them so they can be deleted
cmds.lockNode(members, lock=False)
cmds.delete(members)

def _is_legacy_container(self, container):
"""The GPU caches used to be containerized in Maya as objectSets
like most other Maya loaders. However, that added redundant bloat to
the Maya scene and prevented the loaded gpuCaches from being duplicated
like regular nodes.

As such, any container that is an objectSet is considered legacy.
"""
return cmds.nodeType(container["objectName"]) == "objectSet"
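A tiny sketch of the distinction, with hypothetical node names:

from maya import cmds

# Legacy containers are objectSets; new-style containers are the imprinted
# gpuCache shape itself.
print(cmds.nodeType("modelMain_01_CON") == "objectSet")       # True  -> legacy
print(cmds.nodeType("cache_model_GPU1Shape") == "objectSet")  # False -> imprinted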

def _legacy_update(self, container, context):
path = self.filepath_from_context(context)

# Update the cache
members = cmds.sets(container['objectName'], query=True)
caches = cmds.ls(members, type="gpuCache", long=True)

assert len(caches) == 1, "Expected exactly one gpuCache in the container"

for cache in caches:
cmds.setAttr(cache + ".cacheFileName", path, type="string")

cmds.setAttr(container["objectName"] + ".representation",
repre_entity["id"],
context["representation"]["id"],
type="string")

def switch(self, container, context):
self.update(container, context)

def remove(self, container):
def _legacy_remove(self, container):
members = cmds.sets(container['objectName'], query=True)
cmds.lockNode(members, lock=False)
cmds.delete([container['objectName']] + members)
Expand All @@ -98,4 +121,4 @@ def remove(self, container):
cmds.namespace(removeNamespace=container['namespace'],
deleteNamespaceContent=True)
except RuntimeError:
pass
pass