mrv.dge
Covered: 782 lines
Missed: 54 lines
Skipped: 477 lines
Percent: 93 %

"""Contains a simple yet powerful dependency graph engine allowing computations
to be organized more efficiently.
"""
__docformat__ = "restructuredtext"

import networkx as nx
from collections import deque
import inspect
import weakref
import itertools
from util import iDuplicatable

__all__ = ("ConnectionError", "PlugIncompatible", "PlugAlreadyConnected", "AccessError",
           "NotWritableError", "NotReadableError", "MissingDefaultValueError", "ComputeError",
           "ComputeFailed", "PlugUnhandled",
           "iterShells", "Attribute", "iPlug", "plug", "Graph", "NodeBase")

class ConnectionError( Exception ):
	"""Exception base for all plug related errors"""

class PlugIncompatible( ConnectionError, TypeError ):
	"""Thrown whenever plugs are not compatible with each other during connection"""

class PlugAlreadyConnected( ConnectionError ):
	"""Thrown if one tries to connect a plug to otherplug when otherplug is already connected"""

class AccessError( Exception ):
	"""Base class for all errors indicating invalid access"""

class NotWritableError( AccessError ):
	"""Thrown if a non-writable plug is being written to"""

class NotReadableError( AccessError ):
	"""Thrown if a non-readable attribute is being read"""

class MissingDefaultValueError( AccessError ):
	"""Thrown if a default value is missing for input attributes that are not connected"""

class ComputeError( Exception ):
	"""Thrown if the computation done by a plug failed with an unknown exception.
	The original exception is passed along inside this one"""

class ComputeFailed( ComputeError ):
	"""Raised by the derived class computing a value if the computational goal
	cannot be achieved ( anymore )"""

class PlugUnhandled( ComputeError ):
	"""Raised if a plug was not handled by the node's compute method"""

def iterShells( rootPlugShell, stopAt = lambda x: False, prune = lambda x: False,
			   direction = "up", visit_once = False, branch_first = False ):
	"""Iterator starting at rootPlugShell going "up"stream ( input ) or "down"stream ( output )
	breadth first over plugs, applying filter functions as defined.

	:param rootPlugShell: shell at which to start the traversal. The root plug will be returned as well
	:param stopAt: if function returns true for given PlugShell, iteration will not proceed
		at that point ( possibly continuing at other spots ). Function will always be called, even
		if the shell would be pruned as well. The shell serving as stop marker will not be returned
	:param prune: if function returns true for given PlugShell, the shell will not be returned
		but iteration continues.
	:param direction: traversal direction
			"up" upstream, in direction of inputs of plugs
			"down" downstream, in direction of outputs of plugs
	:param visit_once: if True, plugs will only be returned once, even though they are
			encountered several times as several nodes are connected to them in some way.
	:param branch_first: if True, individual branches will be travelled first ( thus the node
			will be left quickly, following the data stream ).
			If False, the plugs on the node will be returned first before proceeding to the next node."""
	visited = set()
	stack = deque()
	stack.append( rootPlugShell )

	def addToStack( node, stack, lst, branch_first ):
		# convert the given plugs into shells of the node and push them onto the stack
		if branch_first:
			stack.extend( node.toShell( plug ) for plug in lst )
		else:
			reviter = ( node.toShell( lst[i] ) for i in range( len( lst )-1,-1,-1) )
			stack.extendleft( reviter )

	def addOutputToStack( stack, lst, branch_first ):
		# lst already contains shells - push them in the order matching the traversal mode
		if branch_first:
			stack.extend( lst )
		else:
			stack.extendleft( reversed( lst[:] ) )

	while stack:
		shell = stack.pop()
		if shell in visited:
			continue

		if visit_once:
			visited.add( shell )

		if stopAt( shell ):
			continue

		if not prune( shell ):
			yield shell

		if direction == 'up':
			# upstream: follow the plugs affecting us, then the incoming connection
			addToStack( shell.node, stack, shell.plug.affectedBy(), branch_first )

			ishell = shell.input( )
			if ishell:
				if branch_first:
					stack.append( ishell )
				else:
					stack.appendleft( ishell )
		else:
			# downstream: follow the plugs we affect as well as our outgoing connections
			if branch_first:
				addToStack( shell.node, stack, shell.plug.affected(), branch_first )
				addOutputToStack( stack, shell.outputs(), branch_first )
			else:
				addOutputToStack( stack, shell.outputs(), branch_first )
				addToStack( shell.node, stack, shell.plug.affected(), branch_first )
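
# Example ( illustrative sketch, not part of the original module ): a small helper showing
# how iterShells is typically driven. The helper name is hypothetical and not used anywhere
# else; it assumes the given shell belongs to a node inside a properly connected Graph.
def _example_iter_upstream( rootPlugShell ):
	"""Collect every shell upstream of the given shell ( e.g. an output shell of a node ),
	visiting each shell only once."""
	return list( iterShells( rootPlugShell, direction = "up", visit_once = True ) )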

class Attribute( object ):
	"""Simple class defining the type of a plug and several flags that
	affect it. Additionally it can determine how well suited another attribute is.

	**Flags**:
		exact_type: if True, derived classes of our typecls are not considered to be a valid type.
		i.e. a basestring could be stored in a str attr if exact_type is false - it's less than we need, but
		still something.
		Putting a str into a basestring attribute will always work though, as it would be more than we need.
		readonly: if True, the attribute's plug cannot be written to. Read-only attributes can be used
		as storage that the user can read, but not write.
		You can write read-only plugs by directly setting their cache - this, of course, is only
		for the node itself and will never be done by the framework.

	**computable**:
		Plugs are automatically computable if they are affected by another plug.
		If this is not the case, they are marked input only and are not computed.
		If this flag is true, even unaffected plugs are computable.
		Plugs that affect something are automatically input plugs and will not be computed.
		If the plug does not affect anything and this flag is False, it is seen as an input plug
		anyway.

		The system does not allow plugs to be input and output plugs at the same time, thus your compute
		cannot be triggered by your own compute.

		cls: if True, the plug requires classes to be set ( instances of 'type' ), but not instances of these classes
		uncached: if False, computed values may be cached, otherwise they will always be recomputed.
		unconnectable: if True, the plug cannot be the destination of a connection
		check_passing_values: check each value as it flows through a connection - usually compatibility is only checked
		on connection and when values are set, but not if they flow through an existing connection

	**Default Values**:
		Although default values can be simple primitives or classes, a callable is specifically supported.
		It allows you to get a callback whenever a default value is required.
		The same result could be achieved by connecting the plug in question, but dynamic defaults are a quick
		way to achieve that.
		Your returned value will be type-checked against the required type if check_passing_values is set."""
	kNo, kGood, kPerfect = 0, 127, 255				# specify how good attributes fit together
	exact_type, readonly, computable, cls, uncached, unconnectable, check_passing_values = ( 1, 2, 4, 8, 16, 32, 64 )

	def __init__( self, typeClass, flags, default = None ):
		self.typecls = typeClass
		self.flags = flags			# used for bitflags describing mode
		self._default = default

		# check the default value, unless it is a callable providing it dynamically
		if default is not None:
			if not hasattr( default, '__call__' ) and self.compatabilityRate( default ) == 0:
				raise TypeError( "Default value %r is not compatible with this attribute" % default )

	def _getClassRating( self, cls, exact_type ):
		"""Compute class rating

		:return: rating based on cls being compared against our typecls.
				0 means there is no type compatibility, 255 means cls matches our typecls, or linearly
				less if cls is merely part of the mro of our typecls
		"""
		if not isinstance( cls, type ):
			return 0

		mro = self.typecls.mro()
		mro.reverse()

		if not cls in mro:
			# if cls is a subclass of our type, we are still fine with it
			if not exact_type and self.typecls in cls.mro():
				return self.kPerfect
			return 0

		if len( mro ) == 1:
			return self.kPerfect

		rate = ( float( mro.index( cls ) ) / float( len( mro ) - 1 ) ) * self.kPerfect

		if exact_type and rate != self.kPerfect:		# exact type check
			return 0

		return rate

	def affinity( self, otherattr ):
		"""Compute affinity for otherattr.

		:return:
			rating from 0 to 255 defining how well the attributes match each
			other in general - how well can we store values of otherattr ?
			Thus this comparison is directed.

		:note: for checking connections, use `connectionAffinity`"""
		# class attributes can only be matched with class attributes
		if self.flags & self.cls != otherattr.flags & self.cls:
			return 0

		# try to rate the default value of the other attribute, fall back to its type class
		rate = self.kNo
		try:
			defvalue = otherattr.default()
			rate = self.compatabilityRate( defvalue )
		except (MissingDefaultValueError,TypeError):
			rate = self._getClassRating( otherattr.typecls, self.flags & self.exact_type )

		return rate

	def connectionAffinity( self, destinationattr ):
		"""Compute connection affinity for given destination attribute

		:return:
			rating from 0 to 255 defining the quality of the connection to
			destinationattr. An affinity of 0 means the connection is not possible, 255 means it
			is perfectly suited.
			The connection is a directed one from self -> destinationattr """
		if destinationattr.flags & self.unconnectable:		# destination must be connectable
			return 0

		return destinationattr.affinity( self )

	def compatabilityRate( self, value ):
		"""Compute value's compatibility rate

		:return: value between 0 and 255, 0 means no compatibility, 255 a perfect match.
			If larger than 0, the plug can hold the value ( assuming the flags are set correctly ). """
		if isinstance( value, type ):
			# value is a class
			if not self.flags & self.cls:
				return 0		# we do not accept classes
			return self._getClassRating( value, self.flags & self.exact_type )
		else:
			# value is an instance
			if not self.flags & self.cls:
				return self._getClassRating( value.__class__, self.flags & self.exact_type )

		return 0

	def default( self ):
		""":return: default value stored for this attribute, or raise
		:note: handles dynamic defaults, so you should not directly access the default member variable
		:raise MissingDefaultValueError: if attribute does not have a default value
		:raise TypeError: if value returned by dynamic attribute has incorrect type"""
		if self._default is None:
			raise MissingDefaultValueError( "Attribute %r has no default value" % self )

		# dynamic default: call it and optionally verify the returned type
		if hasattr( self._default, '__call__' ):
			default = self._default()
			if self.flags & self.check_passing_values and self.compatabilityRate( default ) == 0:
				raise TypeError( "Dynamic default value had incorrect type: %s" % type( default ) )
			return default

		return self._default
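
# Example ( illustrative sketch, not part of the original API ): how flags, defaults and
# compatibility ratings interact. The helper below is hypothetical and only meant to be
# read or called manually.
def _example_attribute_usage( ):
	sa = Attribute( basestring, 0 )									# accepts any string instance
	assert sa.compatabilityRate( "hello" ) == Attribute.kPerfect	# str is a subclass of basestring
	assert sa.compatabilityRate( 5 ) == Attribute.kNo				# ints do not fit

	ra = Attribute( int, Attribute.readonly, default = 42 )
	assert ra.default() == 42										# static default value

	# a callable default is evaluated on demand and type-checked if check_passing_values is set
	da = Attribute( int, Attribute.check_passing_values, default = lambda: 7 )
	assert da.default() == 7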

class iPlug( object ):
	"""Defines an interface allowing to compare compatibilities according to types.

	Plugs can either be input plugs or output plugs - output plugs affect no other
	plug on a node, but are affected by 0 or more plugs.

	By convention, a plug has a name - that name must also be the name of the
	member attribute that stores the plug. Plugs, possibly different instances of them,
	need to be re-retrieved on freshly duplicated nodes to allow graph duplication to
	be done properly.

	:note: if your plug class supports the ``setName`` method, a metaclass will
		adjust the name of your plug to match the name it has in the parent class
	"""
	kNo,kGood,kPerfect = ( 0, 127, 255 )

	def __str__( self ):
		return self.name()

	def name( self ):
		""":return: name of the plug ( the name that identifies it on the node )"""
		raise NotImplementedError( "Implement this in subclass" )

	def affects( self, otherplug ):
		"""Set an affects relationship between this plug and otherplug, saying
		that this plug affects otherplug."""
		raise NotImplementedError( "Implement this in subclass" )

	def affected( self ):
		""":return: tuple containing affected plugs ( plugs that are affected by our value )"""
		raise NotImplementedError( "Implement this in subclass" )

	def affectedBy( self ):
		""":return: tuple containing plugs that affect us ( plugs affecting our value )"""
		raise NotImplementedError( "Implement this in subclass" )

	def providesOutput( self ):
		""":return: True if this is an output plug that can trigger computations"""
		raise NotImplementedError( "Implement this in subclass" )

	def providesInput( self ):
		""":return: True if this is an input plug that will never cause computations"""
		raise NotImplementedError( "Implement this in subclass" )

class plug( iPlug ):
	"""Defines an interface allowing to compare compatibilities according to types.

	Plugs are implemented as descriptors, thus they will be defined on node class
	level, and all static information will remain static.

	As descriptors, they are defined statically on the class, and some additional information,
	such as connectivity, is stored on the respective node instance. These special methods
	are handled using the `NodeBase` class.

	Plugs are implemented as descriptors as all type information can be kept per class,
	whereas only connection information changes per node instance.

	Plugs can either be input plugs or output plugs - output plugs affect no other
	plug on a node, but are affected by 0 or more plugs

	:note: class is lowercase as it is used as descriptor ( acting more like a function )
	"""
	kNo,kGood,kPerfect = ( 0, 127, 255 )

	def __init__( self, attribute ):
		"""Initialize the plug with the given attribute - its distinctive name is set later using `setName`"""
		self._name = None
		self.attr = attribute
		self._affects = list()			# list of plugs that are affected by us
		self._affectedBy = list()		# keeps record of all plugs that affect us

	def __get__( self, obj, cls=None ):
		"""A value has been requested - return our plugshell that brings together
		both, the object and the static plug"""
		if obj is not None:
			return obj.toShell( self )

		return self

	# We do not use a __set__ method, allowing our descriptor to be overridden by
	# actual plug instances in the instance dict. Once deleted, we shine through again.

	def name( self ):
		""":return: name of plug"""
		return self._name

	def setName( self, name ):
		"""Set the name of this plug - can be set only once"""
		if not self._name:
			self._name = name
		else:
			raise ValueError( "The name of the plug can only be set once" )

	def affects( self, otherplug ):
		"""Set an affects relationship between this plug and otherplug, saying
		that this plug affects otherplug."""
		if otherplug not in self._affects:
			self._affects.append( otherplug )

		if self not in otherplug._affectedBy:
			otherplug._affectedBy.append( self )

	def affected( self ):
		""":return: tuple containing affected plugs ( plugs that are affected by our value )"""
		return tuple( self._affects )

	def affectedBy( self ):
		""":return: tuple containing plugs that affect us ( plugs affecting our value )"""
		return tuple( self._affectedBy )

	def providesOutput( self ):
		""":return: True if this is an output plug that can trigger computations"""
		return bool( len( self.affectedBy() ) != 0 or self.attr.flags & Attribute.computable )

	def providesInput( self ):
		""":return: True if this is an input plug that will never cause computations"""
		return not self.providesOutput()	# previous version did not recognize storage plugs as input
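
# Example ( illustrative sketch, not part of the original API ): declaring two plugs and an
# affects relation outside of a node class. The names used are hypothetical; on a real node
# the metaclass assigns the names automatically and the descriptors return shells instead.
def _example_plug_relations( ):
	inp = plug( Attribute( float, 0, default = 0.0 ) )
	out = plug( Attribute( float, 0 ) )
	inp.setName( "inp" )
	out.setName( "out" )
	inp.affects( out )									# out is computed from inp

	assert inp.providesInput() and not inp.providesOutput()
	assert out.providesOutput()
	assert out.affectedBy() == ( inp, )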

class _PlugShell( tuple ):
	"""Handles per-node-instance plug connection setup and storage. As plugs are
	descriptors and thus shared at class level, per-node-instance information needs
	special treatment.
	This class is returned whenever the descriptor's get and set methods are called.
	It contains information about the node and the plug involved, allowing connection
	info to be tracked directly using the node dict.

	This allows plugs to be connected, and information to flow through the dependency graph.
	Plugs never act alone since they always belong to a parent node that will be asked for
	value computations if the value is not yet cached.

	:note: Do not instantiate this class yourself, it must be created by the node as different
	node types can use different versions of this shell"""

	def __new__( cls, *args ):
		return tuple.__new__( cls, args )

	def __getattr__( self, attr ):
		"""Allow easy attribute access while staying memory efficient"""
		if attr == 'node':
			return self[0]
		if attr == 'plug':
			return self[1]

		return super( _PlugShell, self ).__getattribute__( attr )

	def __repr__ ( self ):
		return "%s.%s" % ( self.node, self.plug )

	def __str__( self ):
		return repr( self )

	def get( self, mode = None ):
		""":return: value of the plug
		:param mode: optional arbitrary value specifying the mode of the get attempt"""
		if self.hasCache( ):
			return self.cache( )

		if self.plug.providesOutput( ):
			try:
				result = self.node.compute( self.plug, mode )
			except ComputeError,e:
				raise ComputeError( "%s->%s" % ( repr( self ), str( e ) ) )
			except Exception:		# except all - this is an unknown exception - just pass it on, keeping the origin
				raise

			if result is None:
				raise AssertionError( "Plug %s returned None - check your node implementation" % ( str( self ) ) )

			self.setCache( result )
			return result

		elif self.plug.providesInput( ):	# has to be separately checked
			inputshell = self.input()
			if not inputshell:
				# no connection - fall back to the attribute's default value
				try:
					return self.plug.attr.default()
				except ( TypeError, MissingDefaultValueError ),e:
					raise MissingDefaultValueError( "Plug %r failed to retrieve its default value and is not connected" % repr( self ), e )

			value = inputshell.get( mode )
			if self.plug.attr.flags & Attribute.check_passing_values:
				if not self.plug.attr.compatabilityRate( value ):
					raise TypeError( "Value coming from input %s is not compatible with %s" % ( str( inputshell ), str( self ) ) )

			return value

		raise AssertionError( "Plug %s did not provide any output or input!" % repr( self ) )

	def set( self, value, ignore_connection = False ):
		"""Set the given value to be used in our plug
		:param ignore_connection: if True, the plug can be destination of a connection and
		will still get its value set - usually it would be overwritten by the value from the
		connection. The set value will be cleared if something upstream in its connection chain
		changes.
		:raise AssertionError: the respective attribute must be cached, otherwise
		the value will be lost"""
		flags = self.plug.attr.flags
		if flags & Attribute.readonly:
			raise NotWritableError( "Plug %r is not writable" % repr(self) )

		if self.plug.providesOutput( ):
			raise NotWritableError( "Plug %r is not writable as it provides an output itself" % repr(self) )

		if flags & Attribute.uncached:
			raise AssertionError( "Writable attributes must be cached - otherwise the value will not be held" )

		if not ignore_connection and self.input() is not None:
			raise NotWritableError( "Plug %r is connected to %r and thus not explicitly writable" % ( self, self.input() ) )

		self.setCache( value )

	def compatabilityRate( self, value ):
		"""Compute compatibility rate for the given value

		:return: value between 0 and 255, 0 means no compatibility, 255 a perfect match.
			If larger than 0, the plug can hold the value ( assuming the flags are set correctly )
		"""
		return self.plug.attr.compatabilityRate( value )

	def connect( self, otherplugshell, **kwargs ):
		"""Connect this plug to otherplugshell such that otherplugshell is an input plug for our output

		:param kwargs: everything supported by `Graph.connect`
		:return: self on success, allows chained connections
		:raise PlugAlreadyConnected: if otherplugshell is connected and force is False
		:raise PlugIncompatible: if otherplugshell does not appear to be compatible with this one"""
		if not isinstance( otherplugshell, _PlugShell ):
			raise AssertionError( "Invalid Type given to connect: %r" % repr( otherplugshell ) )

		return self.node.graph.connect( self, otherplugshell, **kwargs )

	def disconnect( self, otherplugshell ):
		"""Remove the connection to otherplugshell if we are connected to it.
		:note: does not raise if no connection is present"""
		if not isinstance( otherplugshell, _PlugShell ):
			raise AssertionError( "Invalid Type given to disconnect: %r" % repr( otherplugshell ) )

		return self.node.graph.disconnect( self, otherplugshell )

	def input( self, predicate = lambda shell: True ):
		""":return: the connected input plug or None if there is no such connection
		:param predicate: plug will only be returned if predicate is true for it
		:note: input plugs have one connected plug at most, output plugs can have more than one
			connected plug"""
		sourceshell = self.node.graph.input( self )
		if sourceshell and predicate( sourceshell ):
			return sourceshell
		return None

	def outputs( self, predicate = lambda shell: True ):
		""":return: a list of plugs being the destination of the connection
		:param predicate: plug will only be returned if predicate is true for it - shells will be passed in """
		return self.node.graph.outputs( self, predicate = predicate )

	def connections( self, inpt, output, predicate = lambda shell: True ):
		""":return: all input and/or output connections from this shell
			or to this shell as edges ( sourceshell, destinationshell )
		:param predicate: return true for each destination shell that you expect in the
			returned edge, or for the sourceshell where your shell is the destination.
		:note: Use this method to get edges ready for connection/disconnection"""
		outcons = list()
		if inpt:
			sourceshell = self.input( predicate = predicate )
			if sourceshell:
				outcons.append( ( sourceshell, self ) )

		if output:
			outcons.extend( ( self, oshell ) for oshell in self.outputs( predicate = predicate ) )

		return outcons

	def isConnected( self ):
		""":return: True, if the shell is connected as source or as destination of a connection"""
		return self.input() or self.outputs()

	def iterShells( self, **kwargs ):
		"""Iterate plugs and their connections starting at this plug
		:return: generator for plug shells
		:note: supports all options of `iterShells`, this method allows syntax like:
		node.outAttribute.iterShells( )"""
		return iterShells( self, **kwargs )

	def _cachename( self ):
		return self.plug.name() + "_c"

	def hasCache( self ):
		""":return: True if we currently store a cached value"""
		return hasattr( self.node, self._cachename() )

	def setCache( self, value ):
		"""Set the given value to be stored in our cache
		:raise: TypeError if the value is not compatible to our defined type"""
		if self.plug.attr.compatabilityRate( value ) == 0:
			raise TypeError( "Plug %r cannot hold value %r as it is not compatible" % ( repr( self ), repr( value ) ) )

		# never store anything in uncached mode
		if self.plug.attr.flags & Attribute.uncached:
			return

		# our value changed - invalidate the caches of everything depending on us
		self.clearCache( clear_affected = True )
		setattr( self.node, self._cachename(), value )

	def cache( self ):
		""":return: the cached value or raise
		:raise ValueError: if no value is cached"""
		if self.hasCache():
			return getattr( self.node, self._cachename() )

		raise ValueError( "Plug %r did not have a cached value" % repr( self ) )

	def clearCache( self, clear_affected = False, cleared_shells_set = None ):
		"""Empty the cache of our plug
		:param clear_affected: if True, the caches of our affected plugs ( connections
		or affects relations ) will also be cleared.
		This operation is recursive, and needs to be as different shells on different nodes
		might do things differently.
		:param cleared_shells_set: if set, it can be used to track which plugs have already been dirtied to
		prevent recursive loops.
		Propagation will happen even if we do not have a cache to clear ourselves """
		if self.hasCache():
			del( self.node.__dict__[ self._cachename() ] )

		if clear_affected:
			if not cleared_shells_set:		# initialize our tracking set
				cleared_shells_set = set()

			if self in cleared_shells_set:
				return

			cleared_shells_set.add( self )	# assure we do not come here twice

			all_shells = itertools.chain( self.node.toShells( self.plug.affected() ), self.outputs() )
			for shell in all_shells:
				shell.clearCache( clear_affected = True, cleared_shells_set = cleared_shells_set )

	# allow 'sourceshell >> destinationshell' syntax for forced connections
	__rshift__ = lambda self,other: self.connect( other, force=True )
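
# Example ( illustrative sketch, not part of the original API ): pushing a value through a
# forced connection between two shells. It assumes sourceshell is a writable storage plug,
# destinationshell is an input plug, both belong to nodes in the same Graph, and their
# attributes are compatible.
def _example_forward_value( sourceshell, destinationshell, value ):
	sourceshell.set( value )					# cache the value on the source plug
	sourceshell >> destinationshell				# same as sourceshell.connect( destinationshell, force = True )
	return destinationshell.get()				# reads the value through the connection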

class Graph( nx.DiGraph, iDuplicatable ):
	"""Holds the nodes and their connections

	Nodes are kept in a separate list whereas the plug connections are kept
	in the underlying DiGraph"""

	def __init__( self, **kwargs ):
		"""Initialize the DiGraph and add some additional attributes"""
		super( Graph, self ).__init__( **kwargs )
		self._nodes = set()			# our processes from which we can make connections

	def __del__( self ):
		"""Clear our graph"""
		self.clear()				# clear connections
		self._nodes.clear()

	def __getattr__( self , attr ):
		"""Allows access to nodes by name just by accessing the graph directly"""
		try:
			return self.nodeByID( attr )
		except NameError:
			return super( Graph, self ).__getattribute__( attr )

	def writeDot( self , fileOrPath  ):
		"""Write the connections in self to the given file object or path
		:todo: remove if no longer needed"""
		writegraph = nx.DiGraph()

		# add all nodes first
		for node in self.iterNodes():
			writegraph.add_node( node, color="#ebba66", width=4, height=2, fontsize=22 )

		# now add the edges, including the shell-to-node edges
		for sshell,eshell in self.edges_iter():
			writegraph.add_edge( sshell,eshell )
			writegraph.add_edge( sshell.node, sshell, color="#000000" )

			writegraph.add_node( sshell, color="#000000", label=sshell.plug )
			writegraph.add_node( eshell, color="#000000", label=eshell.plug )

			writegraph.add_edge( eshell,eshell.node, color="#000000" )

		nx.write_dot(writegraph, fileOrPath)

	def createInstance( self ):
		"""Create a copy of self and return it"""
		return self.__class__( )

	def copyFrom( self, other ):
		"""Duplicate all data from the other graph into this one, creating duplicates
		of the nodes as well"""
		def copyshell( shell, nodemap ):
			# return the shell on the copied node corresponding to the given shell
			nodecpy = nodemap[ shell.node ]
			return getattr( nodecpy, shell.plug.name() )

		self.name = other.name

		# duplicate the nodes first, then add them, finally recreate the connections
		nodemap = dict()
		for node in other.iterNodes():
			nodecpy = node.duplicate( add_to_graph = False )		# copy node
			nodemap[ node ] = nodecpy

		for duplnode in nodemap.itervalues():
			self.addNode( duplnode )

		for sshell,eshell in other.edges_iter():
			cstart = copyshell( sshell, nodemap )
			cend = copyshell( eshell, nodemap )
			cstart.connect( cend )

	def addNode( self, node ):
		"""Add a new node instance to the graph
		:note: node membership is exclusive, thus node instances
		can only be in one graph at a time
		:return: self, for chained calls"""
		if not isinstance( node, NodeBase ):
			raise TypeError( "Node %r must be of type NodeBase" % node )

		if node in self._nodes:
			return self

		if node.graph is not None:		# membership is exclusive - leave the previous graph first
			node.graph.removeNode( node )

		self._nodes.add( node )
		node.graph = weakref.proxy( self )		# assure the node knows us

		return self

	def removeNode( self, node ):
		"""Remove the given node from the graph ( if it exists in it )"""
		try:
			# drop all connections to and from the node before removing it
			for sshell, eshell in node.connections( 1, 1 ):
				self.disconnect( sshell, eshell )

			node.graph = None
			self._nodes.remove( node )
		except KeyError:
			pass

	def clearCache( self ):
		"""Clear the cache of all nodes in the graph - this forces the graph
		to reevaluate on the next request"""
		for node in self._nodes:
			node.clearCache()

	def hasNode( self , node ):
		""":return: True if the node is in this graph, False otherwise"""
		return node in self._nodes

	def iterNodes( self, predicate = lambda node: True ):
		""":return: generator returning all nodes in this graph
		:param predicate: if True for node, it will be returned
		:note: there is no particular order"""
		for node in self._nodes:
			if predicate( node ):
				yield node

	def iterConnectedNodes( self, predicate = lambda node: True ):
		""":return: generator returning all nodes that are connected in this graph,
			in no particular order.
			For an ordered iteration, use `iterShells`.
		:param predicate: if True for node, it will be returned"""
		# the graph's own nodes are shells - unpack them and deduplicate their nodes
		nodes_seen = set()
		for node,plug in self.nodes_iter():
			if node in nodes_seen:
				continue
			nodes_seen.add( node )
			if predicate( node ):
				yield node

	def nodes( self ):
		""":return: immutable copy of the nodes used in the graph"""
		return tuple( self._nodes )

	def numNodes( self ):
		""":return: number of nodes in the graph"""
		return len( self._nodes )

	def nodeByID( self, nodeID ):
		""":return: instance of a node according to the given node id
		:raise NameError: if no such node exists in graph"""
		for node in self.iterNodes():
			if node.id() == nodeID:
				return node

		raise NameError( "Node with ID %s not found in graph" % nodeID )

	def connect( self, sourceshell, destinationshell, force = False ):
		"""Connect sourceshell to destinationshell such that destinationshell becomes an input for sourceshell's output

		:param sourceshell: PlugShell being source of the connection
		:param destinationshell: PlugShell being destination of the connection
		:param force: if False, existing connections to destinationshell will not be broken, but an exception is raised.
			If True, existing connections may be broken
		:return: sourceshell on success, allows chained connections
		:raise PlugAlreadyConnected: if destinationshell is connected and force is False
		:raise PlugIncompatible: if destinationshell does not appear to be compatible with sourceshell"""
		# assure both nodes live in this graph
		if not sourceshell.node.graph is destinationshell.node.graph:
			raise AssertionError( "You cannot connect nodes from different graphs" )

		self._nodes.add( sourceshell.node )
		self._nodes.add( destinationshell.node )

		# check compatibility of the attributes
		if sourceshell.plug.attr.connectionAffinity( destinationshell.plug.attr ) == 0:
			raise PlugIncompatible( "Cannot connect %r to %r as they are incompatible" % ( repr( sourceshell ), repr( destinationshell ) ) )

		oinput = destinationshell.input( )
		if oinput is not None:
			if oinput == sourceshell:
				return sourceshell

			if not force:
				raise PlugAlreadyConnected( "Cannot connect %r to %r as it is already connected" % ( repr( sourceshell ), repr( destinationshell ) ) )

			# break the existing connection
			oinput.disconnect( destinationshell )

		self.add_edge( sourceshell, v = destinationshell )
		return sourceshell

	def disconnect( self, sourceshell, destinationshell ):
		"""Remove the connection between sourceshell and destinationshell if they are connected
		:note: does not raise if no connection is present"""
		self.remove_edge( sourceshell, v = destinationshell )

		# also remove shells that are no longer connected to anything
		for shell in sourceshell,destinationshell:
			if len( self.neighbors( shell ) ) == 0:
				self.remove_node( shell )

	def input( self, plugshell ):
		""":return: the connected input shell of plugshell or None if there is no such connection
		:note: input plugs have one connected plug at most, output plugs can have more than one connected plug"""
		try:
			pred = self.predecessors( plugshell )
			if pred:
				return pred[0]
		except nx.NetworkXError:
			pass

		return None

	def outputs( self, plugshell, predicate = lambda x : True ):
		""":return: a list of shells being the destination of the connection to plugshell
		:param predicate: plug will only be returned if predicate is true for it - shells will be passed in """
		try:
			return [ s for s in self.successors( plugshell ) if predicate( s ) ]
		except nx.NetworkXError:
			return list()
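
# Example ( illustrative sketch, not part of the original API ): a round trip through the
# graph level connection interface. It assumes both shells belong to nodes added to the
# given graph, that their attributes are compatible, and that destinationshell is not yet
# connected.
def _example_graph_connections( graph, sourceshell, destinationshell ):
	graph.connect( sourceshell, destinationshell )
	assert graph.input( destinationshell ) == sourceshell
	assert destinationshell in graph.outputs( sourceshell )

	graph.disconnect( sourceshell, destinationshell )
	assert graph.input( destinationshell ) is None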

class _NodeBaseCheckMeta( type ):
	"""Class checking the consistency of the NodeBase class before it is being created"""

	def __new__( metacls, name, bases, clsdict ):
		"""Check:
			- every plugname must correspond to a node member name
		"""
		newcls = super( _NodeBaseCheckMeta, metacls ).__new__( metacls, name, bases, clsdict )

		membersdict = inspect.getmembers( newcls )		# do not filter, as plugs could be overridden
		try:
			if hasattr( newcls, "plugsStatic" ):
				for plug in newcls.plugsStatic( ):
					for name,member in membersdict:
						if member == plug and plug.name() != name:
							# assure the plug's name matches the name of its class member
							if hasattr( plug, 'setName' ):
								plug.setName( name )
							else:
								raise AssertionError( "Plug %r is named %s, but must be named %s as in its class %s" % ( plug, plug.name(), name, newcls ) )
		except TypeError:
			# some node types generate their plugs dynamically and cannot answer plugsStatic
			pass

		return newcls

class NodeBase( iDuplicatable ):
	"""Base class that provides plug support to deriving classes.
	It will create some simple tracking attributes required for the plug system
	to work.

	Nodes can compute values of their plugs if these do not have a cache.

	Nodes are identified by an ID - the default graph implementation though will
	be okay with just having instances.
	The ID is also used for string representations of this node"""
	shellcls = _PlugShell					# class used to instantiate new shells
	__metaclass__ = _NodeBaseCheckMeta		# check the class before it is being created

	def __init__( self, *args, **kwargs ):
		"""We require a directed graph to track the connectivity between the plugs.
		It must be supplied by the super class and should be as global as required to
		connect the NodeBases together properly.

		:param kwargs: 'id' = id of the instance, defaults to None if it is not required
		:note: we are super() compatible, and assure our base is initialized correctly"""
		self.graph = None
		self._id = None

		newid = kwargs.get( 'id', None )
		if newid:
			self.setID( newid )

	def __del__( self ):
		"""Remove ourselves from the graph and delete our connections"""
		try:
			pass
		except (AttributeError,ReferenceError):		# .graph could be None
			pass

	def __str__( self ):
		"""Use our id as string or the default implementation"""
		if self.id() is not None:
			return str( self.id() )

		return super( NodeBase, self ).__str__( )

	def createInstance( self, *args, **kwargs ):
		"""Create a copy of self and return it

		:note: override by subclass - the __init__ methods should do the rest"""
		return self.__class__( id = self.id() )

	def copyFrom( self, other, add_to_graph = True ):
		"""Just take the graph from other, but do not ( never ) duplicate it

		:param add_to_graph: if true, the new node instance will be added to the graph of the other node
		:note: default implementation does not copy plug caches ( which are stored in
			the node dict ) - this is because a reevaluation is usually required on the
			duplicated node"""
		self.setID( other.id() )				# id copying would create equally named clones for now
		if add_to_graph and other.graph:		# add ourselves to the graph of the other node
			other.graph.addNode( self )

	def compute( self, plug, mode ):
		"""Called whenever a plug needs computation as its value is not
		cached or is marked dirty ( as one of the inputs changed )

		:param plug: the static plug instance which requested the computation.
			It is the instance you defined on the class
		:param mode: the mode of operation. It is completely up to the subclasses how that
			attribute is going to be used
		:note: to be implemented by subclass"""
		raise NotImplementedError( "To be implemented by subclass" )

	def setID( self, newID ):
		"""Set the id of this node to newID

		:return: previously assigned id"""
		curid = self.id()
		self._id = newID
		return curid

	def id( self ):
		""":return: ID of this instance"""
		return self._id

	def toShells( self, plugs ):
		""":return: list of shells made from plugs and our node"""
		outlist = list()
		for plug in plugs:
			outlist.append( self.toShell( plug ) )
		return outlist

	def toShell( self, plug ):
		""":return: a plugshell suitable for this class"""
		return getattr( self, 'shellcls' )( self, plug )		# prevent the class variable from being bound !

	def clearCache( self ):
		"""Clear the cache of all plugs on this node - this basically forces it
		to recompute the next time an output plug is being queried"""
		for plug in self.plugs( ):
			self.toShell( plug ).clearCache( clear_affected = False )

	@classmethod
	def plugsStatic( cls, predicate = lambda x: True ):
		""":return: list of static plugs as defined on this node - they are class members
		:param predicate: return the static plug only if predicate is true
		:note: Use this method only if you do not have an instance - there are nodes
			that actually have no static plug information, but will dynamically generate them.
			For this to work, they need an instance - thus the plugs method is an instance
			method and is meant to be the most commonly used one."""
		pred = lambda m: isinstance( m, plug )

		pluggen = ( m[1] for m in inspect.getmembers( cls, predicate = pred ) if predicate( m[1] ) )
		return list( pluggen )

	def plugs( self, predicate = lambda x: True ):
		""":return: list of dynamic plugs as defined on this node - they are usually retrieved
			on class level, but may be overridden on instance level
		:param predicate: return the plug only if predicate is true"""
		# collect plugs from the instance dict as well as from all classes in the mro
		all_dict_holders = itertools.chain( ( self, ), self.__class__.mro() )
		all_dicts = ( instance.__dict__ for instance in all_dict_holders )
		pluggen = ( v for d in all_dicts for v in d.itervalues() if isinstance( v, plug ) and predicate( v ) )

		return list( pluggen )

	@classmethod
	def inputPlugsStatic( cls, **kwargs ):
		""":return: list of static plugs suitable as input
		:note: convenience method"""
		return cls.plugsStatic( predicate = lambda p: p.providesInput(), **kwargs )

	def inputPlugs( self, **kwargs ):
		""":return: list of plugs suitable as input
		:note: convenience method"""
		return self.plugs( predicate = lambda p: p.providesInput(), **kwargs )

	@classmethod
	def outputPlugsStatic( cls, **kwargs ):
		""":return: list of static plugs suitable to deliver output
		:note: convenience method"""
		return cls.plugsStatic( predicate = lambda p: p.providesOutput(), **kwargs )

	def outputPlugs( self, **kwargs ):
		""":return: list of plugs suitable to deliver output
		:note: convenience method"""
		return self.plugs( predicate = lambda p: p.providesOutput(), **kwargs )

	def connections( self, inpt, output ):
		""":return: tuples of shells defining connections of the given kind:
			tuple( InputNodeOutputShell, OurNodeInputShell ) for input connections and
			tuple( OurNodeOutputShell, OutputNodeInputShell ) for output connections
		:param inpt: include input connections to this node
		:param output: include output connections ( from this node to others )"""
		outConnections = list()
		plugs = self.plugs()

		if inpt:
			shells = self.toShells( ( p for p in plugs if p.providesInput() ) )
			for shell in shells:
				ishell = shell.input( )
				if ishell:
					outConnections.append( ( ishell, shell ) )

		if output:
			shells = self.toShells( ( p for p in plugs if p.providesOutput() ) )
			for shell in shells:
				outConnections.extend( ( ( shell, oshell ) for oshell in shell.outputs() ) )

		return outConnections

	@classmethod
	def filterCompatiblePlugs( cls, plugs, attrOrValue, raise_on_ambiguity = False, attr_affinity = False,
								attr_as_source=True ):
		""":return: sorted list of (rate,plug) tuples suitable to deal with the given attribute.
			Thus they could connect to it as well as get their value set.
			The most suitable plug comes first.
			Incompatible plugs will be pruned.
		:param attrOrValue: either an attribute or the value you would like to set to the
			attr at the plug in question.
		:param raise_on_ambiguity: if True, the method raises if a plug has the same
			rating as another plug already on the output list, thus it is not clear anymore
			which plug should handle a request
		:param attr_affinity: if True, it will not check connection affinity, but attribute
			affinity only. It checks how compatible the attributes of the plugs are, disregarding
			whether they can be connected or not.
			Only valid if attrOrValue is an attribute.
		:param attr_as_source: if True, attrOrValue will be treated as the source of a connection
			whose values each plug would need to take.
			If False, attrOrValue is the destination of a connection and needs to take the values of the
			given plugs, or they would connect to it. Only used if attrOrValue is an attribute.
		:raise TypeError: if ambiguous input was found"""
		attribute = None
		value = attrOrValue
		if isinstance( attrOrValue, Attribute ):
			attribute = attrOrValue

		outSorted = list()
		for plug in plugs:
			if attribute:
				sourceattr = attribute
				destinationattr = plug.attr
				if not attr_as_source:
					destinationattr = attribute
					sourceattr = plug.attr

				if attr_affinity:
					rate = destinationattr.affinity( sourceattr )	# how well can dest store source ?
				else:
					rate = sourceattr.connectionAffinity( destinationattr )
			else:
				# rate by the ability of the plug to hold the given value
				rate = plug.attr.compatabilityRate( value )

			if not rate:
				continue

			outSorted.append( ( rate, plug ) )

		outSorted.sort()
		outSorted.reverse()		# high rates first

		if raise_on_ambiguity:
			# group plugs by rate and complain if any rate occurs more than once
			ratemap = dict()
			for rate,plug in outSorted:
				ratemap.setdefault( rate, list() ).append( plug )

			report = ""
			for rate, pluglist in ratemap.iteritems( ):
				if len( pluglist ) > 1:
					report += "Rate: %i :" % rate
					for plug in pluglist:
						report += "\n%s" % str(plug)

			if report:
				report = "Ambiguous plugs found\n" + report
				raise TypeError( report )

		return outSorted
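
# Example ( illustrative end-to-end sketch, not part of the original module ): a minimal
# NodeBase subclass wired into a Graph. All names below ( AddOne, inValue, outResult, the
# node ids ) are hypothetical and chosen purely for demonstration.
class AddOne( NodeBase ):
	"""Adds one to whatever arrives at its input plug"""
	inValue = plug( Attribute( int, 0, default = 0 ) )		# writable storage plug
	outResult = plug( Attribute( int, 0 ) )					# becomes an output through 'affects'
	inValue.affects( outResult )

	def compute( self, plug, mode ):
		if plug is self.__class__.outResult:
			return self.inValue.get( mode ) + 1
		raise PlugUnhandled( "Unhandled plug: %r" % plug )


def _example_dependency_graph( ):
	g = Graph()
	a = AddOne( id = "a" )
	b = AddOne( id = "b" )
	g.addNode( a )
	g.addNode( b )

	a.inValue.set( 1 )						# caches the value on a's input shell
	a.outResult >> b.inValue				# connect a's output to b's input ( force = True )

	assert b.outResult.get() == 3			# 1 + 1 from node a, + 1 from node b
	g.clearCache()							# force reevaluation on the next request
	return g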