mrv.maya.nt.storage
Covered: 369 lines
Missed: 30 lines
Skipped 182 lines
Percent: 92 %
"""Contains an implementation for the Persistence plugin for easy access within
mrv and derived nodes.
"""
__docformat__ = "restructuredtext"
  7
import os
  8
from persistence import PyPickleData
  9
import maya.OpenMaya as api
 11
import mrv.maya.undo as undo
 12
from mrv.util import iDuplicatable
 14
from base import Node, DependNode, Data, createNode, delete
 15
from set import ObjectSet
 17
import copy
 19
MFnDependencyNode = api.MFnDependencyNode
 21
__all__ = ("StorageBase", "StorageNode")
 27
def findStoragePlug(masterPlug, dataID):
	"""Locate the storage element carrying the given data id.

	:param masterPlug: compound array plug containing all data elements
	:param dataID: string id stored in the first child of each element
	:return: compound plug containing all data and connections for dataID,
		or None if no element matches"""
	# child(0) of each element is the string id plug
	candidates = (element for element in masterPlug if element.child(0).asString() == dataID)
	return next(candidates, None)
 36
@undoable
def _makeElementPlug(masterPlug, dataID):
	"""Create a new element plug on the master plug - unconditionally.

	:return: freshly created logical element plug whose id child has been
		set to the given dataID"""
	newElement = masterPlug.mnextLogicalPlug()
	idChild = newElement.child(0)
	idChild.msetString(dataID)
	return newElement
 44
@undoable
def storagePlug(masterPlug, dataID, plugType = None, autoCreate=False):
	"""Retrieve a plug of the requested kind from the element identified by dataID.

	:param masterPlug: masterPlug to search for data
	:param dataID: the name of the plug to be returned
	:param plugType:
		StorageBase.kMessage: return message array plug only
		StorageBase.kValue: return python pickle array plug only
		StorageBase.kStorage: return the storage plug itself containing message and the value plug
		StorageBase.kFlags: return plug to integer which can be used as storage for bitflags to accompany the id
		None: return (picklePlug , messagePlug)
	:param autoCreate: if True, a plug with the given dataID will be created if it does not
		yet exist
	:return: plug of the given type, either as tuple of two plugs or the plug
		specified by plugType
	:raise AttributeError: if a plug with dataID does not exist and autoCreate is False
	:raise TypeError: if plugType is unknown"""
	storage = findStoragePlug(masterPlug, dataID)
	if storage is None:
		if not autoCreate:
			raise AttributeError("Plug with id %s not found" % dataID)
		storage = _makeElementPlug(masterPlug, dataID)
	# END handle non-existing storage element

	# children: 0 = id, 1 = flags, 2 = pickle value, 3 = message
	if plugType is None:
		return (storage.child(2), storage.child(3))
	if plugType == StorageBase.kStorage:
		return storage
	childIndex = {StorageBase.kValue: 2,
					StorageBase.kMessage: 3,
					StorageBase.kFlags: 1}.get(plugType)
	if childIndex is None:
		raise TypeError("Invalid plugType value: %s" % plugType)
	return storage.child(childIndex)
 84
@undoable
def makePlug(masterPlug, dataID):
	"""Retrieve or create a plug that corresponds to the given dataID.

	:param dataID: string identifier
	:return: the data plug, containing subplugs dval and dmsg
		for generic data and message connections respectively"""
	plug = findStoragePlug(masterPlug, dataID)
	if plug is None:
		# no such element yet - create it
		plug = _makeElementPlug(masterPlug, dataID)
	return plug
 97
def setsByPlug(mdplug):
	""":return: all objectSets stored at the given message data plug"""
	# wrap every input connection and keep only real object sets
	wrapped = (inplug.mwrappedNode() for inplug in mdplug.minputs())
	return [node for node in wrapped if isinstance(node, ObjectSet)]
102
def partition(mdplug):
	""":return: partition node attached to the sets of the given message data plug, or
		None if no managed partition exists"""
	candidates = list()
	for objset in setsByPlug(mdplug):
		candidates.extend(objset.partitions())
	# END gather partitions of all connected sets

	# our own partitions are tagged with the marker attribute
	for candidate in candidates:
		if hasattr(candidate, StorageBase.kPartitionIdAttr):
			return candidate
	# END for each candidate partition
	return None
119
@undoable
def objectSet(mdplug, setIndex, autoCreate=True, setPrefix=''):
	"""Get an object set identified with setIndex at the given data plug.

	:param mdplug: data message plug whose object set to handle
	:param setIndex: logical index at which the set will be connected to our message plug array
	:param autoCreate: if True, a set will be created if it does not yet exist
	:param setPrefix: if given, the string will be used as prefix for the name of newly created
		object sets
	:raises AttributeError: if the set did not exist and autoCreate is False
	:note: method is implicitly undoable if autoCreate is True, this also means that you cannot
		explicitly undo this operation as you do not know if undo has been queued or not
	:note: newly created sets will automatically use partitions if one of the sets does"""
	setplug = mdplug.elementByLogicalIndex(setIndex)
	inputplug = setplug.minput()
	if not inputplug.isNull():
		return inputplug.mwrappedNode()
	# END early exit - the set already exists

	if not autoCreate:
		raise AttributeError("Set at %s[%i] did not exist" % (mdplug.name(), setIndex))

	# keep creation, connection and partition handling atomic on the undo stack;
	# the StartUndo instance closes the undo bracket when it goes out of scope
	su = undo.StartUndo()
	newset = createNode(setPrefix + "Set", "objectSet", forceNewLeaf = True)
	inputplug = newset.message
	inputplug.mconnectTo(setplug)

	# if the sibling sets already use a partition, the new set joins it as well
	if partition(mdplug):
		setPartition(mdplug, True)
	# END handle partition

	return inputplug.mwrappedNode()
154
def dataIDs(masterPlug, data_prefix=''):
	"""List the data ids stored on the given master plug.

	:param data_prefix: the string prefix of data names which must match with the prefix
		of the data id to be returned, with the matching prefix pruned.
		By default, all data ids will match
	:return: list of all matching data ids available in the given master plug"""
	nprefix = len(data_prefix)
	allIDs = (element.child(0).asString() for element in masterPlug)
	# prune the prefix from each matching id
	return [did[nprefix:] for did in allIDs if did.startswith(data_prefix)]
169
@undoable
def setPartition(mdplug, state):
	"""Make all sets of the given data message plug use a partition or not.

	:param state: if True, a partition will be used, if False, it will be disabled
	:note: this method makes sure that all sets are hooked up to the partition
	:raise ValueError: if we did not have a single set to which to add to the partition
	:return: if state is True, the (possibly newly created) partition node, otherwise None"""
	sets = setsByPlug(mdplug)
	pt = partition(mdplug)

	if not state:
		# disable: tear down and remove the managed partition if we have one
		if pt:
			pt.clear()
			delete(pt)
		# END handle existing partition
		return
	# END disable handling

	if pt is None:
		if not sets:
			raise ValueError("Cannot create partition as plug %s did not have any connected sets" % mdplug)
		# END abort if no sets exist

		pt = createNode("storagePartition", "partition", forceNewLeaf=True)

		# tag the partition with our marker attribute so `partition` can identify it
		tattr = api.MFnTypedAttribute()
		marker = tattr.create(StorageBase.kPartitionIdAttr, "pid", api.MFnData.kString)
		pt.addAttribute(marker)
	# END create new partition

	# make sure all sets are members of our partition
	pt.addSets(sets)
	return pt
206
def clearDataPlug(vdplug):
	"""Clear the data in the given value data plug by assigning a fresh,
	empty pickle data object to it"""
	emptyData = api.MFnPluginData().create(PyPickleData.kPluginDataId)
	vdplug.msetMObject(emptyData)
211
@undoable
def clearAllData(masterPlug):
	"""Empty all data storage plugs beneath the given masterPlug. Message connections are currently
	not affected"""
	# child(2) of each element is the pickle value plug
	for element in masterPlug:
		clearDataPlug(element.child(2))
	# END for each element plug
219
@undoable
def deleteObjectSet(mdplug, setIndex):
	"""Delete the object set at the given message data plug, at the given setIndex.

	:note: use this method to delete your sets instead of manual deletion as it will automatically
		remove the managed partition in case the last set is being deleted"""
	try:
		objset = objectSet(mdplug, setIndex, autoCreate = False)
	except AttributeError:
		# set did not exist - nothing to do
		return
	# END handle missing set

	# drop the managed partition once the last set goes away
	if len(setsByPlug(mdplug)) == 1:
		setPartition(mdplug, False)
	# END handle last set

	delete(objset)
241
class StorageBase(iDuplicatable):
	"""A storage node contains a set of attributes allowing it to store
	python data and objects being stored in a pickled format upon file save.
	Additionally you can store connections.
	Nodes used with this interface must be compatible to the following attribute scheme.
	To create that scheme, use `createStorageAttribute`

	**Attribute Setup**::

		(shortname (description) [data type])
		dta (data)[multi compound]
			id (data id)[string]
			type (data type) [int]	# for your own use, store bitflags to specify attribute
			dval (data value) [python pickle]
			dmsg (data message)[multi message]

	**Configuration**::

		data_prefix: will prefix every value name when setting and getting values - this allows
			several clients to use the same storage attribute (on the same node for example)
			It acts like a namespace
		attr_prefix: prefixes the actual maya attribute to access
		maya_node: the maya node holding the actual attributes

	:note: A mrv node should derive from this class to allow easy attribute access of its
		own compatible attributes - its designed for flexiblity
	:note: attribute accepts on the generic attribute should be set by a plugin node when it
		creates its attributes
	:todo: should self._node be stored as weakref ?"""

	# plug type identifiers accepted by `storagePlug` and `_elementPlug`
	kValue, kMessage, kStorage, kFlags = range(4)
	kPartitionIdAttr = "bda_storagePartition"		# may not change !

	class PyPickleValue(object):
		"""Wrapper object providing native access to the wrapped python pickle object
		and to the corresponding value plug, providing utility methods for easier handling"""
		__slots__ = ('_plug', '_pydata', '_isReferenced', '_updateCalled')

		def __init__(self, valueplug, pythondata):
			"""value plug contains the plugin data in pythondata"""
			# use object.__setattr__ directly - our own __setattr__ redirects
			# unknown attributes into the pickle dict
			sa = object.__setattr__
			sa(self, '_plug', valueplug)
			sa(self, '_pydata', pythondata)
			sa(self, '_isReferenced', MFnDependencyNode(valueplug.node()).isFromReferencedFile())
			sa(self, '_updateCalled', False)

		def __len__(self):
			return len(self._pydata)

		def __iter__(self):
			return iter(self._pydata)

		def __getattr__(self, attr):
			# delegate anything we do not define ourselves to the pickle data
			return getattr(self._pydata, attr)

		def __setattr__(self, attr, val):
			try:
				object.__setattr__(self, attr, val)
			except AttributeError:
				# not one of our __slots__ - store it in the pickle dict instead
				self._pydata[attr] = val

		def __getitem__(self, key):
			return self._pydata[key]

		def __setitem__(self, key, value):
			self._pydata[key] = value
			if self._isReferenced:
				self._valueChanged()		# assure we make it into the reference , but only if we change

		def __delitem__(self, key):
			del(self._pydata[key])

		def _valueChanged(self):
			"""Will be called automatically if the underlying value changed if
			the node of the underlying plug is referenced

			:note: this method will only be called once during the lifetime of this object if it changes,
				as its enough to trigger reference to write the value if it changes once.
				Getting and setting data is expensive as there is a tracking dict in the background
				being spawned with internally created copies."""
			if self._updateCalled:
				return
			# rewriting the data object makes the edit stick in the reference
			self._plug.msetMObject(self._plug.asMObject())
			self._updateCalled = True

		def isReferenced(self):
			""":return: True if the data is from a referenced plug"""
			return self._isReferenced

	__slots__ = ('_dprefix', '_aprefix', '_node')

	def __init__(self, data_prefix='', maya_node = None, attr_prefix=''):
		"""Allows customization of this base to modify its behaviour
		:note: see more information on the input attributes in the class description"""
		object.__init__(self)
		self._dprefix = data_prefix
		self._aprefix = attr_prefix
		self._node = maya_node
		if not maya_node:
			# without an explicit storage node we must be a Node ourselves
			if not isinstance(self, Node):
				raise TypeError("StorageNode's derived class must be an instance of type %r if maya_node is not given" % Node)
			self._node = self
		# END handle missing maya node

	def createInstance(self, *args, **kwargs):
		"""Create a new instance with our type"""
		return self.__class__(self._dprefix, self._node, self._aprefix)

	def copyFrom(self, other, *args, **kwargs):
		"""Copy all values from other to ourselves

		:param kwargs:
			 * shallow:
			 	if True, default False, only a shallow copy will
				be made. If False, a deep copy will be made
		:note: only does so if the attribute and data prefixes actually match (which should be
			the case if we get here, checking for it anyway
		:note: as pickle data always copies by reference to be efficient, we have to explicitly
			create new data to assure we really copy it
		:todo: copy connections to our messages as well, make it an option at least"""
		self.setDataPrefix(other.dataPrefix())
		self.setAttributePrefix(other.attributePrefix())

		shallow = kwargs.pop("shallow", False)
		for dataid in other.dataIDs():
			othervalplug = other.storagePlug(dataid, plugType = self.kValue, autoCreate = False)
			ownvalplug = self.storagePlug(dataid, plugType = self.kValue, autoCreate = True)

			# start from pristine data on our side
			clearDataPlug(ownvalplug)

			if shallow:
				ownvalplug.msetMObject(othervalplug.asMObject())
			else:
				owndict = self.pythonDataFromPlug(ownvalplug)
				otherdict = other.pythonDataFromPlug(othervalplug)

				# copy values deeply where possible so both sides stay independent
				for key in otherdict:
					val = otherdict[key]
					if isinstance(val, iDuplicatable):
						owndict[key] = val.duplicate()
					else:
						# deep copy may fail on non-copyable values - fall back to the reference
						try:
							owndict[key] = copy.deepcopy(val)
						except copy.Error:
							owndict[key] = val
					# END handle duplication per value
				# END for each key in other's data
			# END shallow / deep copy
		# END for each data id

	def makePlug(self, dataID):
		"""see ``makePlug`` module level function"""
		return makePlug(self.masterPlug(), self._dprefix + dataID)

	@undoable
	def clearAllData(self):
		"""see ``clearAllData`` module level method
		:note: use this method if you want to make sure your node
			is empty after it has been duplicated (would usually be done in the postContructor"""
		return clearAllData(self.masterPlug())

	@undoable
	def clearData(self, dataID):
		"""Clear all data stored in the given dataID"""
		try:
			valueplug = self.storagePlug(dataID, plugType=self.kValue, autoCreate = False)
		except AttributeError:
			# no plug with that id - nothing to clear
			return
		else:
			clearDataPlug(valueplug)
		# END ignore if plug did not exist

	def _elementPlug(self, dataID, dataType, autoCreate=False):
		""":return: element plug of the given type, prefixed with our data prefix"""
		return storagePlug(self.masterPlug(), self._dprefix + dataID, dataType, autoCreate)

	def findStoragePlug(self, dataID):
		""":return: compound plug with given dataID or None"""
		return findStoragePlug(self.masterPlug(), self._dprefix + dataID)

	def masterPlug(self):
		""":return: master plug according to our attributePrefix"""
		return self._node.findPlug(self._aprefix + 'dta')

	def dataIDs(self):
		"""see module level function with the same name"""
		return dataIDs(self.masterPlug(), self._dprefix)

	def storagePlug(self, dataID, plugType = None, autoCreate=False):
		"""see ``storagePlug`` module level function"""
		return storagePlug(self.masterPlug(), self._dprefix+dataID, plugType, autoCreate)

	def pythonData(self, dataID, **kwargs):
		""":return: PyPickleVal object at the given index (it can be modified natively)
		:param dataID: id of the data to retrieve
		:param kwargs: passed on to `_elementPlug`, i.e. ``autoCreate``
			NOTE(review): earlier documentation also mentioned an ``index`` keyword,
			which `_elementPlug` does not accept - confirm before passing it"""
		return self.pythonDataFromPlug(self._elementPlug(dataID, StorageBase.kValue, **kwargs))

	@classmethod
	def pythonDataFromPlug(cls, valplug):
		"""Extract the python data using the given plug directly

		:param valplug: data value plug containing the plugin data
		:return: PyPickleData object allowing data access"""
		# if the plug was never initialized with pickle data, do so now
		try:
			plugindata = valplug.masData()
		except RuntimeError:
			plugindataobj = api.MFnPluginData().create(PyPickleData.kPluginDataId)

			valplug.msetMObject(plugindataobj) # use original version only - no undo support
			plugindata = Data(plugindataobj)
		# END handle uninitialized plug

		return StorageBase.PyPickleValue(valplug, plugindata.data())

	@undoable
	def objectSet(self, dataID, setIndex, autoCreate = True):
		"""see module level ``objectSet`` function"""
		return objectSet(self._elementPlug(dataID, self.kMessage, autoCreate), setIndex, autoCreate, dataID)

	@undoable
	def deleteObjectSet(self, dataID, setIndex):
		"""See the module level method called ``deleteObjectSet``
		:note: ValueError and AttributeError raised underneath propagate to the caller"""
		# NOTE: the previous implementation wrapped this in a try/except that
		# immediately re-raised (followed by unreachable code) - equivalent to no handler
		return deleteObjectSet(self._elementPlug(dataID, self.kMessage, autoCreate = False), setIndex)

	def setsByID(self, dataID):
		""":return: all object sets stored under the given dataID"""
		return setsByPlug(self._elementPlug(dataID, self.kMessage, autoCreate=False))

	@undoable
	def setPartition(self, dataID, state):
		"""see ``setPartition`` function on module level"""
		return setPartition(self._elementPlug(dataID, self.kMessage, autoCreate=False), state)

	def partition(self, dataID):
		""":return: partition Node attached to the sets at dataID or None if state is disabled"""
		return partition(self._elementPlug(dataID, self.kMessage, autoCreate=False))

	def storageNode(self):
		""":return: Node actually being used as storage"""
		return self._node

	def setStorageNode(self, node):
		"""Set ourselves to use the given storage compatible node

		:note: use this if the path of our instance has changed - otherwise
			trying to access functions will fail as the path of our node might be invalid"""
		self._node = node

	def setDataPrefix(self, prefix):
		"""Change the data prefix to the given value string"""
		self._dprefix = prefix

	def dataPrefix(self):
		""":return: our data prefix"""
		return self._dprefix

	def attributePrefix(self):
		""":return: our attribute prefix"""
		return self._aprefix

	def setAttributePrefix(self, prefix):
		"""Change the prefix with which to access to the actual storage data attributes on our node
		to the given string"""
		self._aprefix = prefix
560
class StorageNode(DependNode, StorageBase):
	"""Pythonic, easy-to-access value container node.

	Connect it to your own node and query it for the values that logically belong
	to that node. Being a plain value container, it can easily be replaced by another
	one, or several containers can keep different sets of information.

	:note: the storage node can only use generic attributes and recover them properly during
		scene reload if the configuration of the generic attributes has been set up properly -
		they are unique only per node type, not per instance of the node type.
		Thus it is recommended to base the storage node attributes on your own custom type
		that sets up the generic attributes as it requires during plugin load"""

	def __init__(self, *args):
		"""Initialize both bases explicitly - there is no cooperative super() chain here.
		StorageBase receives no maya_node and thus falls back to using this instance
		as its own storage node"""
		DependNode.__init__(self)
		StorageBase.__init__(self)