"""Contains an implementation for the Persistence plugin for easy access within
mrv and derived nodes.
"""
__docformat__ = "restructuredtext"

import os
from persistence import PyPickleData
import maya.OpenMaya as api

import mrv.maya.undo as undo
from mrv.util import iDuplicatable

from base import Node, DependNode, Data, createNode, delete
from set import ObjectSet

import copy

MFnDependencyNode = api.MFnDependencyNode

__all__ = ("StorageBase", "StorageNode")

28 """:return: compound plug containing all data and connections for the given dataID
29 :param masterPlug: compound plug containing all data"""
30 for compoundplug in masterPlug:
31 if compoundplug.child(0).asString() == dataID:
32 return compoundplug
33
34 return None
35
38 """Find an empty logical plug index and return the newly created
39 logical plug with given dataID - unconditionally"""
40 elementPlug = masterPlug.mnextLogicalPlug()
41 elementPlug.child(0).msetString(dataID)
42 return elementPlug
43
@undoable
def storagePlug(masterPlug, dataID, plugType = None, autoCreate=False):
    """
    :return: plug of the given type, either as a tuple of two plugs or the plug
        specified by plugType
    :param masterPlug: masterPlug to search for data
    :param dataID: the name of the plug to be returned
    :param plugType:
        StorageBase.kMessage: return the message array plug only
        StorageBase.kValue: return the python pickle array plug only
        StorageBase.kStorage: return the storage plug itself, containing the message and the value plug
        StorageBase.kFlags: return a plug to an integer which can be used as storage for bit flags accompanying the id
        None: return (picklePlug, messagePlug)
    :param autoCreate: if True, a plug with the given dataID will be created if it does not
        yet exist
    :raise AttributeError: if a plug with dataID does not exist and autoCreate is False
    :raise TypeError: if plugType is unknown"""
    matchedplug = findStoragePlug(masterPlug, dataID)
    if matchedplug is None:
        if autoCreate:
            matchedplug = _makeElementPlug(masterPlug, dataID)
        else:
            raise AttributeError("Plug with id %s not found" % dataID)
    # END create plug on demand

    # return the requested child plug(s)
    if plugType is None:
        return (matchedplug.child(2), matchedplug.child(3))
    elif plugType == StorageBase.kStorage:
        return matchedplug
    elif plugType == StorageBase.kValue:
        return matchedplug.child(2)
    elif plugType == StorageBase.kMessage:
        return matchedplug.child(3)
    elif plugType == StorageBase.kFlags:
        return matchedplug.child(1)
    else:
        raise TypeError("Invalid plugType value: %s" % plugType)

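# Illustrative usage sketch (not part of the original module; node and id names are
# assumptions). A client holding a storage compatible node would typically do:
#
#   master = node.findPlug('dta')                       # master compound plug, see StorageBase
#   valplug, msgplug = storagePlug(master, "mydata", autoCreate=True)
#   flags = storagePlug(master, "mydata", plugType=StorageBase.kFlags)
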
@undoable
def makePlug(masterPlug, dataID):
    """Retrieve or create a plug that corresponds to the given dataID

    :param dataID: string identifier
    :return: the created data plug, containing the subplugs dval and dmsg
        for generic data and message connections respectively"""
    existingPlug = findStoragePlug(masterPlug, dataID)
    if existingPlug is not None:
        return existingPlug

    # no plug existed for dataID - create a new element plug
    return _makeElementPlug(masterPlug, dataID)

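# Sketch (assumption): ``makePlug`` is the get-or-create counterpart used when no
# specific child plug is needed yet:
#
#   dataplug = makePlug(master, "mydata")   # existing element plug or a fresh one
#   dataplug.child(0).asString()            # -> "mydata"
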
@undoable
def objectSet(mdplug, setIndex, autoCreate=True, setPrefix=''):
    """Get an object set identified with setIndex at the given data message plug

    :param mdplug: data message plug whose object set to handle
    :param setIndex: logical index at which the set will be connected to our message plug array
    :param autoCreate: if True, a set will be created if it does not yet exist
    :param setPrefix: if given, the string will be used as prefix for the name of newly created
        object sets
    :raises ValueError: if a set does not exist at setIndex and autoCreate is False
    :raises AttributeError: if the plug did not exist (and autoCreate is False)
    :note: the method is implicitly undoable if autoCreate is True, which also means that you cannot
        explicitly undo this operation as you do not know whether undo has been queued or not
    :note: newly created sets will automatically use partitions if one of the existing sets does"""
    mp = mdplug

    # get the element plug and check whether a set is already connected
    setplug = mp.elementByLogicalIndex(setIndex)
    inputplug = setplug.minput()
    if inputplug.isNull():
        if not autoCreate:
            raise AttributeError("Set at %s[%i] did not exist" % (mp.name(), setIndex))
        su = undo.StartUndo()
        objset = createNode(setPrefix + "Set", "objectSet", forceNewLeaf = True)
        inputplug = objset.message
        inputplug.mconnectTo(setplug)

        # hook the new set up to the partition in case one is used already
        if partition(mdplug):
            setPartition(mdplug, True)
    # END create and connect set on demand

    return inputplug.mwrappedNode()

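# Sketch of typical use (names assumed): ``msgplug`` would be the message array plug
# obtained via storagePlug(master, "mydata", plugType=StorageBase.kMessage).
#
#   objset = objectSet(msgplug, 0, autoCreate=True, setPrefix="mydata")
#   # objset is a wrapped ObjectSet node and can be used like any other set
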
def dataIDs(masterPlug, data_prefix=''):
    """
    :return: list of all data ids available in the given master plug
    :param data_prefix: the string prefix of data names which must match the prefix
        of a data id for it to be returned, with the matching prefix pruned.
        By default, all data ids will match"""
    outids = list()
    for compoundplug in masterPlug:
        did = compoundplug.child(0).asString()
        if did.startswith(data_prefix):
            outids.append(did[len(data_prefix):])
    # END for each compound plug element
    return outids

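# Sketch: if the master plug holds the ids "ui.color" and "core.settings", then
# (hypothetical values):
#
#   dataIDs(master)         # -> ['ui.color', 'core.settings']
#   dataIDs(master, 'ui.')  # -> ['color']  (the prefix is pruned from the result)
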
171 """Make all sets of the given data message plug use a partition or not
172 :param state: if True, a partition will be used, if False, it will be disabled
173 :note: this method makes sure that all sets are hooked up to the partition
174 :raise ValueError: If we did not have a single set to which to add to the partition
175 :raise AttributeError: If the dataID has never had sets
176 :return: if state is True, the name of the possibly created (or existing) partition"""
177 sets = setsByPlug(mdplug)
178 pt = partition(mdplug)
179
180 if state:
181 if pt is None:
182 if not sets:
183 raise ValueError("Cannot create partition as plug %s did not have any connected sets" % mdplug)
184
185
186 pt = createNode("storagePartition", "partition", forceNewLeaf=True)
187
188 tattr = api.MFnTypedAttribute()
189 attr = tattr.create(StorageBase.kPartitionIdAttr, "pid", api.MFnData.kString)
190 pt.addAttribute(attr)
191
192
193
194 pt.addSets(sets)
195 return pt
196 else:
197 if pt:
198
199
200 pt.clear()
201 delete(pt)
202
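# Sketch of the partition round trip (illustrative only):
#
#   setPartition(msgplug, True)    # creates a 'storagePartition' node and adds all sets
#   partition(msgplug)             # -> the partition node, or None
#   setPartition(msgplug, False)   # clears and deletes the partition again
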
213 """Empty all data storage plugs beneath the given masterPlug. Message connections are currently
214 not affected"""
215 for compoundplug in masterPlug:
216 clearDataPlug(compoundplug.child(2))
@undoable
def deleteObjectSet(mdplug, setIndex):
    """Delete the object set at the given message data plug, at the given setIndex

    :note: use this method to delete your sets instead of manual deletion as it will automatically
        remove the managed partition in case the last set is being deleted"""
    try:
        objset = objectSet(mdplug, setIndex, autoCreate = False)
    except AttributeError:
        # no set exists at the given index, nothing to delete
        return
    else:
        # if this was the last set, remove the managed partition as well
        if len(setsByPlug(mdplug)) == 1:
            setPartition(mdplug, False)

        delete(objset)

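# Sketch: removing the set created above; once the last set is gone, the managed
# partition is deleted as well (see the len(setsByPlug(mdplug)) == 1 check):
#
#   deleteObjectSet(msgplug, 0)
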

class StorageBase(iDuplicatable):
    """A storage node contains a set of attributes allowing it to store
    python data and objects being stored in a pickled format upon file save.
    Additionally you can store connections.
    Nodes used with this interface must be compatible with the following attribute scheme.
    To create that scheme, use `createStorageAttribute`

    **Attribute Setup**::

        (shortname (description) [data type])
        dta (data)[multi compound]
            id (data id)[string]
            type (data type) [int]  # for your own use, store bitflags to specify attribute
            dval (data value) [python pickle]
            dmsg (data message)[multi message]

    **Configuration**::

        data_prefix: will prefix every value name when setting and getting values - this allows
            several clients to use the same storage attribute (on the same node for example).
            It acts like a namespace
        attr_prefix: prefixes the actual maya attribute to access
        maya_node: the maya node holding the actual attributes

    :note: A mrv node should derive from this class to allow easy attribute access of its
        own compatible attributes - it is designed for flexibility
    :note: attribute accepts on the generic attribute should be set by a plugin node when it
        creates its attributes
    :todo: should self._node be stored as weakref?"""
    kValue, kMessage, kStorage, kFlags = range(4)
    kPartitionIdAttr = "bda_storagePartition"
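
    # How the child indices used throughout this module map onto the attribute
    # scheme documented above:
    #   child(0) -> id    (string identifier compared by findStoragePlug)
    #   child(1) -> type  (bitflag integer, kFlags)
    #   child(2) -> dval  (python pickle data, kValue)
    #   child(3) -> dmsg  (message array, kMessage)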

    class PyPickleValue(object):
        """Wrapper object providing native access to the wrapped python pickle object
        and to the corresponding value plug, providing utility methods for easier handling"""
        __slots__ = ('_plug', '_pydata', '_isReferenced', '_updateCalled')

        def __init__(self, valueplug, pythondata):
            """value plug contains the plugin data in pythondata"""
            sa = object.__setattr__
            sa(self, '_plug', valueplug)
            sa(self, '_pydata', pythondata)
            sa(self, '_isReferenced', MFnDependencyNode(valueplug.node()).isFromReferencedFile())
            sa(self, '_updateCalled', False)

        def __len__(self):
            return len(self._pydata)

        def __iter__(self):
            return iter(self._pydata)

        def __getattr__(self, attr):
            return getattr(self._pydata, attr)

        def __setattr__(self, attr, val):
            try:
                object.__setattr__(self, attr, val)
            except AttributeError:
                self._pydata[attr] = val
            # END fall back to the pickle data dict

        def __getitem__(self, key):
            return self._pydata[key]

        def __setitem__(self, key, value):
            # assumed reconstruction: write through to the pickle data and, on referenced
            # nodes, trigger the one-time value update (see _valueChanged below)
            self._pydata[key] = value
            if self._isReferenced:
                self._valueChanged()

        def __delitem__(self, key):
            del(self._pydata[key])

314 """Will be called automatically if the underlying value changed if
315 the node of the underlying plug is referenced
316
317 :note: this method will only be called once during the lifetime of this object if it changes,
318 as its enough to trigger reference to write the value if it changes once.
319 Getting and setting data is expensive as there is a tracking dict in the background
320 being spawned with internally created copies."""
321 if self._updateCalled:
322 return
323 self._plug.msetMObject(self._plug.asMObject())
324 self._updateCalled = True
325
326
327
329 """:return: True if the data is from a referenced plug"""
330 return self._isReferenced
331
332
333
334
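    # Sketch of how PyPickleValue behaves once obtained (illustrative only, the
    # variable names are assumptions):
    #
    #   pval = StorageBase.pythonDataFromPlug(valplug)
    #   pval['mykey'] = 42        # dict-style access goes to the tracked pickle data
    #   list(pval), len(pval)     # iteration and length are forwarded as well
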
    __slots__ = ('_dprefix', '_aprefix', '_node')

    def __init__(self, data_prefix='', maya_node = None, attr_prefix=''):
        """Allows customization of this base to modify its behaviour

        :note: see more information on the input attributes in the class description"""
        object.__init__(self)
        self._dprefix = data_prefix
        self._aprefix = attr_prefix
        self._node = maya_node
        if not maya_node:
            if not isinstance(self, Node):
                raise TypeError("StorageNode's derived class must be an instance of type %r if maya_node is not given" % Node)
            self._node = self
        # END use ourselves as storage node by default

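    # Configuration sketch (hypothetical names): either derive your node type from
    # StorageBase, or wrap a foreign storage compatible node explicitly:
    #
    #   accessor = StorageBase(data_prefix='ui.', maya_node=someStorageNode)
    #   # every dataID passed to accessor methods is now transparently prefixed with 'ui.'
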
356 """Create a new instance with our type"""
357 return self.__class__(self._dprefix, self._node, self._aprefix)
358
    def copyFrom(self, other, *args, **kwargs):
        """Copy all values from other to ourselves

        :param kwargs:
            * shallow:
                if True, default False, only a shallow copy will
                be made. If False, a deep copy will be made
        :note: only does so if the attribute and data prefixes actually match (which should be
            the case if we get here), checking for it anyway
        :note: as pickle data always copies by reference to be efficient, we have to explicitly
            create new data to assure we really copy it
        :todo: copy connections to our messages as well, make it an option at least"""
        self.setDataPrefix(other.dataPrefix())
        self.setAttributePrefix(other.attributePrefix())

        shallow = kwargs.pop("shallow", False)
        for dataid in other.dataIDs():
            othervalplug = other.storagePlug(dataid, plugType = self.kValue, autoCreate = False)
            ownvalplug = self.storagePlug(dataid, plugType = self.kValue, autoCreate = True)

            clearDataPlug(ownvalplug)

            if shallow:
                ownvalplug.msetMObject(othervalplug.asMObject())
            else:
                owndict = self.pythonDataFromPlug(ownvalplug)
                otherdict = other.pythonDataFromPlug(othervalplug)

                # copy each value, duplicating it where the type supports duplication
                for key in otherdict:
                    val = otherdict[key]
                    if isinstance(val, iDuplicatable):
                        owndict[key] = val.duplicate()
                    else:
                        # try an explicit deep copy, fall back to the reference
                        try:
                            owndict[key] = copy.deepcopy(val)
                        except copy.Error:
                            owndict[key] = val
                    # END handle duplicatable values
                # END for each key in other's data
            # END shallow/deep copy
        # END for each dataid

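    # Sketch: duplicating stored values between two initialized storage interfaces
    # (names assumed):
    #
    #   target.copyFrom(source)                 # deep-copies the pickled values
    #   target.copyFrom(source, shallow=True)   # shares the underlying data object instead
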
408 """see ``makePlug`` module level function"""
409 return makePlug(self.masterPlug(), self._dprefix + dataID)
410
    @undoable
    def clearAllData(self):
        """see ``clearAllData`` module level function

        :note: use this method if you want to make sure your node
            is empty after it has been duplicated (would usually be done in the postConstructor)"""
        return clearAllData(self.masterPlug())

    @undoable
    def clearData(self, dataID):
        """Clear all data stored in the given dataID"""
        try:
            valueplug = self.storagePlug(dataID, plugType=self.kValue, autoCreate = False)
        except AttributeError:
            return
        else:
            clearDataPlug(valueplug)
        # END ignore non-existing plugs

    def _elementPlug(self, dataID, dataType, autoCreate=False):
        """:return: element plug of the given type"""
        return storagePlug(self.masterPlug(), self._dprefix + dataID, dataType, autoCreate)

439 """:return: compound plug with given dataID or None"""
440 return findStoragePlug(self.masterPlug(), self._dprefix + dataID)
441
443 """:return: master plug according to our attributePrefix"""
444 return self._node.findPlug(self._aprefix + 'dta')
445
447 """see module level function with the same name"""
448 return dataIDs(self.masterPlug(), self._dprefix)
449
    def storagePlug(self, dataID, plugType = None, autoCreate=False):
        """see ``storagePlug`` module level function"""
        return storagePlug(self.masterPlug(), self._dprefix + dataID, plugType, autoCreate)

459 """:return: PyPickleVal object at the given index (it can be modified natively)
460 :param dataID: id of of the data to retrieve
461 :param kwargs:
462 * index:
463 element number of the plug to retrieve, or -1 to get a new plug.
464 Plugs will always be created, the given index specifies a logical plug index
465 * Additionally all arguments supported by `storagePlug`"""
466 return self.pythonDataFromPlug(self._elementPlug(dataID, StorageBase.kValue, **kwargs))
467
    @classmethod
    def pythonDataFromPlug(cls, valplug):
        """Extract the python data using the given plug directly

        :param valplug: data value plug containing the plugin data
        :return: PyPickleData object allowing data access"""
        # query the data - if it does not yet exist, create it on the fly
        try:
            plugindata = valplug.masData()
        except RuntimeError:
            # set an empty pickle data object on the plug so it carries the needed data type
            plugindataobj = api.MFnPluginData().create(PyPickleData.kPluginDataId)

            valplug.msetMObject(plugindataobj)
            plugindata = Data(plugindataobj)
        # END handle missing data

        return StorageBase.PyPickleValue(valplug, plugindata.data())

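    # Sketch of value access through the wrapper (names assumed):
    #
    #   pval = storage.pythonData("settings", autoCreate=True)
    #   pval['size'] = (2, 3)     # stored in the pickle data and written out on file save
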
    @undoable
    def objectSet(self, dataID, setIndex, autoCreate = True):
        """see module level ``objectSet`` function"""
        return objectSet(self._elementPlug(dataID, self.kMessage, autoCreate), setIndex, autoCreate, dataID)

    @undoable
    def deleteObjectSet(self, dataID, setIndex):
        """see the module level ``deleteObjectSet`` function"""
        try:
            return deleteObjectSet(self._elementPlug(dataID, self.kMessage, autoCreate = False), setIndex)
        except (ValueError, AttributeError):
            raise
        return

511 """:return: all object sets stored under the given dataID"""
512 return setsByPlug(self._elementPlug(dataID, self.kMessage, autoCreate=False))
513
514
    @undoable
    def setPartition(self, dataID, state):
        """see ``setPartition`` function on module level"""
        return setPartition(self._elementPlug(dataID, self.kMessage, autoCreate=False), state)

522 """:return: partition Node attached to the sets at dataID or None if state is disabled"""
523 return partition(self._elementPlug(dataID, self.kMessage, autoCreate=False))
524
525
526
527
528
530 """:return: Node actually being used as storage"""
531 return self._node
532
534 """Set ourselves to use the given storage compatible node
535
536 :note: use this if the path of our instance has changed - otherwise
537 trying to access functions will fail as the path of our node might be invalid"""
538 self._node = node
539
541 """Change the data prefix to the given value string"""
542 self._dprefix = prefix
543
545 """:return: our data prefix"""
546 return self._dprefix
547
549 """:return: our attribute prefix"""
550 return self._aprefix
551
553 """Change the prefix with which to access to the actual storage data attributes on our node
554 to the given string"""
555 self._aprefix = prefix
556
557
558
559
class StorageNode(DependNode, StorageBase):
    """This node can be used as a pythonic and easy-to-access value container - it could
    be connected to your node, and queried for values actually being queried on your node.
    As a value container, it can easily be replaced by another one, or keep different sets of information

    :note: the storage node can only use generic attributes and recover them properly during scene reload
        if the configuration of the generic attributes has been set up properly - they are unique only per
        node type, not per instance of the node type.
        Thus it is recommended to base the storage node attributes on your own custom type that sets up the
        generic attributes as it requires during plugin load"""
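
    # Usage sketch (the node type name registered by the persistence plugin is assumed
    # to be 'storageNode'):
    #
    #   snode = createNode("myValues", "storageNode")
    #   snode.pythonData("cfg", autoCreate=True)['answer'] = 42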