"""Contains an implementation for the Persistence plugin for easy access within
maya nodes."""
__docformat__ = "restructuredtext"
__docformat__ = "restructuredtext"

import copy

import maya.OpenMaya as api

from persistence import PyPickleData

import mrv.maya.undo as undo
from mrv.util import iDuplicatable

from base import Node, DependNode, Data, createNode, delete
from set import ObjectSet
# cache the function set class for fast access below
MFnDependencyNode = api.MFnDependencyNode

__all__ = ("StorageBase", "StorageNode")

#{ Procedural Access
# Functions to access most functionality of the storagebase without actually deriving from it
# They are as low-level as possible regarding their input parameters
def findStoragePlug(masterPlug, dataID):
    """:return: compound plug containing all data and connections for the given dataID,
        or None if no such plug exists
    :param masterPlug: compound plug containing all data
    :param dataID: string identifier of the data plug to find"""
    for compoundplug in masterPlug:
        # child(0) is the string 'id' attribute of the storage compound
        if compoundplug.child(0).asString() == dataID:
            return compoundplug
    # END for each element (in search for matching dataID)
    return None
def _makeElementPlug(masterPlug, dataID):
    """Find an empty logical plug index and return the newly created
    logical plug with given dataID - unconditionally

    :param masterPlug: compound array plug to receive the new element
    :param dataID: string id written into the new element's id child"""
    elementPlug = masterPlug.mnextLogicalPlug()
    elementPlug.child(0).msetString(dataID)
    return elementPlug
def storagePlug(masterPlug, dataID, plugType = None, autoCreate=False):
    """
    :return: plug of the given type, either as tuple of two plugs or a single plug
    :param masterPlug: masterPlug to search for data
    :param dataID: the name of the plug to be returned
    :param plugType:
        StorageBase.kMessage: return message array plug only
        StorageBase.kValue: return python pickle array plug only
        StorageBase.kStorage: return the storage plug itself containing message and the value plug
        StorageBase.kFlags: return plug to integer which can be used as storage for bitflags to accompany the id
        None: return (picklePlug, messagePlug)
    :param autoCreate: if True, a plug with the given dataID will be created if it does not
        yet exist
    :raise AttributeError: if a plug with dataID does not exist and default value is None
    :raise TypeError: if plugtype unknown"""
    matchedplug = findStoragePlug(masterPlug, dataID)
    if matchedplug is None:
        if autoCreate:
            matchedplug = _makeElementPlug(masterPlug, dataID)
        else:
            raise AttributeError("Plug with id %s not found" % dataID)
    # END matched plug not found handling

    # return the plug variant the caller asked for - children are laid out as
    # (0: id, 1: flags, 2: value, 3: message) on the storage compound
    if plugType is None:
        return (matchedplug.child(2), matchedplug.child(3))
    elif plugType == StorageBase.kStorage:
        return matchedplug
    elif plugType == StorageBase.kValue:
        return matchedplug.child(2)
    elif plugType == StorageBase.kMessage:
        return matchedplug.child(3)
    elif plugType == StorageBase.kFlags:
        return matchedplug.child(1)
    else:
        raise TypeError("Invalid plugType value: %s" % plugType)
def makePlug(masterPlug, dataID):
    """retrieve or create a plug that corresponds to the given dataID

    :param dataID: string identifier
    :return: the created data plug, containing subplugs dval and dmsg
        for generic data and message connections respectively"""
    existingPlug = findStoragePlug(masterPlug, dataID)
    if existingPlug is not None:
        return existingPlug

    # otherwise create it - find a free logical index - do a proper search
    return _makeElementPlug(masterPlug, dataID)
def setsByPlug(mdplug):
    """:return: all objectSets stored at the given message data plug
    :param mdplug: message data plug whose inputs are wrapped and filtered"""
    allnodes = [p.mwrappedNode() for p in mdplug.minputs()]
    return [n for n in allnodes if isinstance(n, ObjectSet)]
def partition(mdplug):
    """:return: partition node attached to the sets of the given message data plug or None
    :param mdplug: message data plug whose sets to inspect"""
    sets = setsByPlug(mdplug)

    # get the dominant partition - collect all partitions our sets are members of
    partitions = list()
    for s in sets:
        partitions.extend(s.partitions())
    # END for each set

    # only partitions we marked with our id attribute count as ours
    for p in partitions:
        if hasattr(p, StorageBase.kPartitionIdAttr):
            return p
    # END for each partition

    # nothing found, there is no partition yet
    return None
def objectSet(mdplug, setIndex, autoCreate=True, setPrefix=''):
    """Get an object set identified with setIndex at the given dataId

    :param mdplug: data message plug whose object set to handle
    :param setIndex: logical index at which the set will be connected to our message plug array
    :param autoCreate: if True, a set will be created if it does not yet exist
    :param setPrefix: if given, the string will be used as prefix for the name of newly created
        object sets
    :raises ValueError: if a set does not exist at setIndex and autoCreate is False
    :raises AttributeError: if the plug did not exist (and autocreate is False)
    :note: method is implicitly undoable if autoCreate is True, this also means that you cannot
        explicitly undo this operation as you do not know if undo has been queued or not
    :note: newly created sets will automatically use partitions if one of the sets does"""
    mp = mdplug
    # array plug having our sets
    setplug = mp.elementByLogicalIndex(setIndex)
    inputplug = setplug.minput()
    if inputplug.isNull():
        if not autoCreate:
            raise AttributeError("Set at %s[%i] did not exist" % (mp.name(), setIndex))
        # END abort if we may not create
        su = undo.StartUndo()  # make the following operations atomic
        objset = createNode(setPrefix + "Set", "objectSet", forceNewLeaf = True)
        inputplug = objset.message
        inputplug.mconnectTo(setplug)

        # hook it up to the partition if the other sets already use one
        if partition(mdplug):
            setPartition(mdplug, True)
        # END partition handling
    # END create set as needed

    # return actual object set
    return inputplug.mwrappedNode()
def dataIDs(masterPlug, data_prefix=''):
    """
    :return: list of all data ids available in the given master plug
    :param data_prefix: the string prefix of data names which must match with the prefix
        of the data id to be returned, with the matching prefix pruned.
        By default, all data ids will match"""
    outids = list()
    for compoundplug in masterPlug:
        did = compoundplug.child(0).asString()
        if did.startswith(data_prefix):
            outids.append(did[len(data_prefix):])
        # END if prefix matches
    # END for each compound plug element
    return outids
def setPartition(mdplug, state):
    """Make all sets of the given data message plug use a partition or not
    :param state: if True, a partition will be used, if False, it will be disabled
    :note: this method makes sure that all sets are hooked up to the partition
    :raise ValueError: If we did not have a single set to which to add to the partition
    :raise AttributeError: If the dataID has never had sets
    :return: if state is True, the name of the possibly created (or existing) partition"""
    sets = setsByPlug(mdplug)
    pt = partition(mdplug)

    if state:
        if pt is None:
            if not sets:
                raise ValueError("Cannot create partition as plug %s did not have any connected sets" % mdplug)
            # END check sets exist
            pt = createNode("storagePartition", "partition", forceNewLeaf=True)

            # mark the partition as ours by adding our custom id attribute
            tattr = api.MFnTypedAttribute()
            attr = tattr.create(StorageBase.kPartitionIdAttr, "pid", api.MFnData.kString)
            pt.addAttribute(attr)
        # END create partition

        # make sure all sets are members of our partition
        # NOTE(review): exact membership-API call not visible in the damaged
        # source - confirm against mrv's ObjectSet/Partition interface
        pt.addSets(sets)
        return pt
    else:
        if pt:
            # have to clear partition as, for some reason, or own node will be killed as well !
            pt.clear()
            delete(pt)
        # END if we have a partision
    # END state handling
def clearDataPlug(vdplug):
    """Clear the data in the given value data plug by assigning a fresh,
    empty pickle data object to it"""
    plugindataobj = api.MFnPluginData().create(PyPickleData.kPluginDataId)
    vdplug.msetMObject(plugindataobj)
def clearAllData(masterPlug):
    """Empty all data storage plugs beneath the given masterPlug. Message connections are currently
    left alone"""
    for compoundplug in masterPlug:
        # child(2) is the pickle value plug of the storage compound
        clearDataPlug(compoundplug.child(2))
    # END for each element plug
def deleteObjectSet(mdplug, setIndex):
    """Delete the object set at the given message data plug, at the given setIndex

    :note: use this method to delete your sets instead of manual deletion as it will automatically
        remove the managed partition in case the last set is being deleted"""
    try:
        objset = objectSet(mdplug, setIndex, autoCreate = False)
    except AttributeError:
        # did not exist, its fine
        return
    # END handle missing set

    # if this is the last set, remove the partition as well
    if len(setsByPlug(mdplug)) == 1:
        setPartition(mdplug, False)
    # END remove partition with last set

    delete(objset)
    # END obj set handling
#} END procedural access
class StorageBase(iDuplicatable):
    """A storage node contains a set of attributes allowing it to store
    python data and objects being stored in a pickled format upon file save.
    Additionally you can store connections.
    Nodes used with this interface must be compatible to the following attribute scheme.
    To create that scheme, use `createStorageAttribute`

    **Attribute Setup**::

        (shortname (description) [data type])
        dta (data) [multi compound]
            id (data id) [string]
            type (data type) [int]  # for your own use, store bitflags to specify attribute
            dval (data value) [python pickle]
            dmsg (data message) [multi message]

    **Configuration**::

        data_prefix: will prefix every value name when setting and getting values - this allows
            several clients to use the same storage attribute (on the same node for example)
            It acts like a namespace
        attr_prefix: prefixes the actual maya attribute to access
        maya_node: the maya node holding the actual attributes

    :note: A mrv node should derive from this class to allow easy attribute access of its
        own compatible attributes - its designed for flexiblity
    :note: attribute accepts on the generic attribute should be set by a plugin node when it
        creates its attributes
    :todo: should self._node be stored as weakref ?"""

    kValue, kMessage, kStorage, kFlags = range(4)
    kPartitionIdAttr = "bda_storagePartition"  # may not change !

    class PyPickleValue(object):
        """Wrapper object providing native access to the wrapped python pickle object
        and to the corresponding value plug, providing utility methods for easier handling"""
        __slots__ = ('_plug', '_pydata', '_isReferenced', '_updateCalled')

        def __init__(self, valueplug, pythondata):
            """value plug contains the plugin data in pythondata"""
            sa = object.__setattr__
            sa(self, '_plug', valueplug)
            sa(self, '_pydata', pythondata)
            # note: Dont' use the wrapped node to prevent dependency cycles and improve performance
            sa(self, '_isReferenced', MFnDependencyNode(valueplug.node()).isFromReferencedFile())
            sa(self, '_updateCalled', False)

        def __len__(self):
            return len(self._pydata)

        def __iter__(self):
            return iter(self._pydata)

        def __getattr__(self, attr):
            # delegate unknown attribute access to the wrapped pickle dict
            return getattr(self._pydata, attr)

        def __setattr__(self, attr, val):
            # slots members are set on ourselves, everything else lands in the data dict
            try:
                object.__setattr__(self, attr, val)
            except AttributeError:
                self._pydata[attr] = val

        def __getitem__(self, key):
            return self._pydata[key]

        def __setitem__(self, key, value):
            self._pydata[key] = value
            if self._isReferenced:
                self._valueChanged()  # assure we make it into the reference , but only if we change

        def __delitem__(self, key):
            del(self._pydata[key])

        def _valueChanged(self):
            """Will be called automatically if the underlying value changed if
            the node of the underlying plug is referenced

            :note: this method will only be called once during the lifetime of this object if it changes,
                as its enough to trigger reference to write the value if it changes once.
                Getting and setting data is expensive as there is a tracking dict in the background
                being spawned with internally created copies."""
            if self._updateCalled:
                return
            self._plug.msetMObject(self._plug.asMObject())
            self._updateCalled = True

        def isReferenced(self):
            """:return: True if the data is from a referenced plug"""
            return self._isReferenced
    # END class pypickle value

    __slots__ = ('_dprefix', '_aprefix', '_node')

    #{ Overridden Methods
    def __init__(self, data_prefix='', maya_node = None, attr_prefix=''):
        """Allows customization of this base to modify its behaviour
        :note: see more information on the input attributes in the class description"""
        # now one can derive from us and override __setattr__
        object.__init__(self)
        self._dprefix = data_prefix
        self._aprefix = attr_prefix
        self._node = maya_node
        if not maya_node:
            if not isinstance(self, Node):
                raise TypeError("StorageNode's derived class must be an instance of type %r if maya_node is not given" % Node)
            # we are our own storage node
            self._node = self
        # END no maya node given handling
    #} END overridden methods

    def createInstance(self, *args, **kwargs):
        """Create a new instance with our type"""
        return self.__class__(self._dprefix, self._node, self._aprefix)

    def copyFrom(self, other, *args, **kwargs):
        """Copy all values from other to ourselves

        :param kwargs:
             * shallow:
                if True, default False, only a shallow copy will
                be made. If False, a deep copy will be made
        :note: only does so if the attribute and data prefixes actually match (which should be
            the case if we get here, checking for it anyway
        :note: as pickle data always copies by reference to be efficient, we have to explicitly
            create new data to assure we really copy it
        :todo: copy connections to our messages as well, make it an option at least"""
        self.setDataPrefix(other.dataPrefix())
        self.setAttributePrefix(other.attributePrefix())

        shallow = kwargs.pop("shallow", False)
        for dataid in other.dataIDs():
            othervalplug = other.storagePlug(dataid, plugType = self.kValue, autoCreate = False)
            ownvalplug = self.storagePlug(dataid, plugType = self.kValue, autoCreate = True)

            clearDataPlug(ownvalplug)

            if shallow:
                # pickle data copies by reference here - cheapest possible copy
                ownvalplug.msetMObject(othervalplug.asMObject())
            else:
                owndict = self.pythonDataFromPlug(ownvalplug)
                otherdict = other.pythonDataFromPlug(othervalplug)

                # copy values individually to assure an actual data copy is made
                for key in otherdict:
                    val = otherdict[key]
                    if isinstance(val, iDuplicatable):
                        owndict[key] = val.duplicate()
                    else:
                        # try deep copy, use shallow copy on error
                        try:
                            owndict[key] = copy.deepcopy(val)
                        except Exception:
                            owndict[key] = val
                    # END copy single value
                # END for each key to deep copy
            # END shallow/deep copy
        # END for each dataid

    def makePlug(self, dataID):
        """see ``makePlug`` module level function"""
        return makePlug(self.masterPlug(), self._dprefix + dataID)

    def clearAllData(self):
        """see ``clearAllData`` module level method
        :note: use this method if you want to make sure your node
            is empty after it has been duplicated (would usually be done in the postContructor"""
        return clearAllData(self.masterPlug())

    def clearData(self, dataID):
        """Clear all data stored in the given dataID"""
        try:
            valueplug = self.storagePlug(dataID, plugType=self.kValue, autoCreate = False)
        except AttributeError:
            # no such plug - nothing to clear
            return
        else:
            clearDataPlug(valueplug)
        # ELSE attr exists and clearage is required

    def _elementPlug(self, dataID, dataType, autoCreate=False):
        """:return: element plug of the given type"""
        return storagePlug(self.masterPlug(), self._dprefix + dataID, dataType, autoCreate)

    def findStoragePlug(self, dataID):
        """:return: compound plug with given dataID or None"""
        return findStoragePlug(self.masterPlug(), self._dprefix + dataID)

    def masterPlug(self):
        """:return: master plug according to our attributePrefix"""
        return self._node.findPlug(self._aprefix + 'dta')

    def dataIDs(self):
        """see module level function with the same name"""
        return dataIDs(self.masterPlug(), self._dprefix)

    def storagePlug(self, dataID, plugType = None, autoCreate=False):
        """see ``storagePlug`` module level function"""
        return storagePlug(self.masterPlug(), self._dprefix + dataID, plugType, autoCreate)

    def pythonData(self, dataID, **kwargs):
        """:return: PyPickleVal object at the given index (it can be modified natively)
        :param dataID: id of of the data to retrieve
        :param kwargs:
             * index:
                element number of the plug to retrieve, or -1 to get a new plug.
                Plugs will always be created, the given index specifies a logical plug index
             * Additionally all arguments supported by `storagePlug`"""
        return self.pythonDataFromPlug(self._elementPlug(dataID, StorageBase.kValue, **kwargs))

    @classmethod
    def pythonDataFromPlug(cls, valplug):
        """Extract the python data using the given plug directly

        :param valplug: data value plug containing the plugin data
        :return: PyPickleData object allowing data access"""
        # initialize data if required
        # if the data is null, we do not get a kNullObject, but an exception - fair enough ...
        try:
            plugindata = valplug.masData()
        except RuntimeError:
            # set an empty pickle data object as value
            plugindataobj = api.MFnPluginData().create(PyPickleData.kPluginDataId)

            # data gets copied here - re-retrieve data
            valplug.msetMObject(plugindataobj)  # use original version only - no undo support
            plugindata = Data(plugindataobj)
        # END handle uninitialized data

        return StorageBase.PyPickleValue(valplug, plugindata.data())

    def objectSet(self, dataID, setIndex, autoCreate = True):
        """see module level ``objectSet`` function"""
        return objectSet(self._elementPlug(dataID, self.kMessage, autoCreate), setIndex, autoCreate, dataID)

    def deleteObjectSet(self, dataID, setIndex):
        """See the module level method called ``deleteObjectSet``"""
        try:
            return deleteObjectSet(self._elementPlug(dataID, self.kMessage, autoCreate = False), setIndex)
        except (ValueError, AttributeError):
            # did not exist, its fine
            return
        # END handle no such plug exists

    def setsByID(self, dataID):
        """:return: all object sets stored under the given dataID"""
        return setsByPlug(self._elementPlug(dataID, self.kMessage, autoCreate=False))

    def setPartition(self, dataID, state):
        """see ``setPartition`` function on module level"""
        return setPartition(self._elementPlug(dataID, self.kMessage, autoCreate=False), state)

    def partition(self, dataID):
        """:return: partition Node attached to the sets at dataID or None if state is disabled"""
        return partition(self._elementPlug(dataID, self.kMessage, autoCreate=False))

    def storageNode(self):
        """:return: Node actually being used as storage"""
        return self._node

    def setStorageNode(self, node):
        """Set ourselves to use the given storage compatible node

        :note: use this if the path of our instance has changed - otherwise
            trying to access functions will fail as the path of our node might be invalid"""
        self._node = node

    def setDataPrefix(self, prefix):
        """Change the data prefix to the given value string"""
        self._dprefix = prefix

    def dataPrefix(self):
        """:return: our data prefix"""
        return self._dprefix

    def attributePrefix(self):
        """:return: our attribute prefix"""
        return self._aprefix

    def setAttributePrefix(self, prefix):
        """Change the prefix with which to access to the actual storage data attributes on our node
        to the given string"""
        self._aprefix = prefix
class StorageNode(DependNode, StorageBase):
    """This node can be used as pythonic and easy-to-access value container - it could
    be connected to your node, and queried for values actually being queried on your node.
    As value container, it can easily be replaced by another one, or keep different sets of information

    :note: the storage node can only use generic attributes and recover them properly during scene reload
        if the configuration of the generic attributes have been setup properly - they are unique only per
        node type, not per instance of the node type.
        Thus it is recommended to use the storage node attribute base on your own custom type that sets up the
        generic attributes as it requires during plugin load"""

    #{ Overridden Methods
    def __init__(self, *args):
        """initialize bases properly"""
        DependNode.__init__(self)
        StorageBase.__init__(self)
    #} END overridden methods