"""Contains a simple yet powerful dependency graph engine allowing computations
to be organized more efficiently."""

__docformat__ = "restructuredtext"

import inspect
import itertools
import weakref
from collections import deque

import networkx as nx

from util import iDuplicatable

__all__ = ("ConnectionError", "PlugIncompatible", "PlugAlreadyConnected", "AccessError",
           "NotWritableError", "NotReadableError", "MissingDefaultValueError", "ComputeError",
           "ComputeFailed", "PlugUnhandled",
           "iterShells", "Attribute", "iPlug", "plug", "Graph", "NodeBase")

class ConnectionError( Exception ):
    """Exception base for all plug related errors"""

class PlugIncompatible( ConnectionError, TypeError ):
    """Thrown whenever plugs are not compatible with each other during connection"""

class PlugAlreadyConnected( ConnectionError ):
    """Thrown if one tries to connect a plug to otherplug when otherplug is already connected"""

class AccessError( Exception ):
    """Base class for all errors indicating invalid access"""

class NotWritableError( AccessError ):
    """Thrown if a non-writable plug is being written to"""

class NotReadableError( AccessError ):
    """Thrown if a non-readable attribute is being read"""

class MissingDefaultValueError( AccessError ):
    """Thrown if a default value is missing for input attributes that are not connected"""

class ComputeError( Exception ):
    """Thrown if the computation done by a plug failed with an unknown exception.
    It will be passed on in the exception"""

class ComputeFailed( ComputeError ):
    """Raised by the derived class computing a value if the computational goal
    cannot be achieved ( anymore )"""

class PlugUnhandled( ComputeError ):
    """Raised if a plug was not handled by the node's compute method"""

def iterShells( rootPlugShell, stopAt = lambda x: False, prune = lambda x: False,
                direction = "up", visit_once = False, branch_first = False ):
    """Iterator starting at rootPlugShell going "up"stream ( input ) or "down"stream ( output )
    breadth first over plugs, applying filter functions as defined.

    :param rootPlugShell: shell at which to start the traversal. The root plug will be returned as well
    :param stopAt: if the function returns true for a given PlugShell, iteration will not proceed
        at that point ( possibly continuing at other spots ). The function will always be called, even
        if the shell would be pruned as well. The shell serving as stop marker will not be returned
    :param prune: if the function returns true for a given PlugShell, the shell will not be returned
        but iteration continues.
    :param direction: traversal direction
        "up" upstream, in direction of inputs of plugs
        "down" downstream, in direction of outputs of plugs
    :param visit_once: if True, plugs will only be returned once, even though they are
        encountered several times as several nodes are connected to them in some way.
    :param branch_first: if True, individual branches will be travelled first ( thus the node will be left quickly, following the datastream ).
        If False, the plugs on the node will be returned first before proceeding to the next node"""
    visited = set()
    stack = deque()
    stack.append( rootPlugShell )

    def addToStack( node, stack, lst, branch_first ):
        if branch_first:
            stack.extend( node.toShell( plug ) for plug in lst )
        else:
            reviter = ( node.toShell( lst[i] ) for i in range( len( lst )-1,-1,-1) )
            stack.extendleft( reviter )
    # END addToStack local method

    def addOutputToStack( stack, lst, branch_first ):
        if branch_first:
            stack.extend( lst )
        else:
            stack.extendleft( reversed( lst[:] ) )
    # END addOutputToStack local method

    while stack:
        shell = stack.pop()

        if visit_once:
            if shell in visited:
                continue
            visited.add( shell )
        # END visit once handling

        if stopAt( shell ):
            continue

        if not prune( shell ):
            yield shell

        if direction == 'up':
            if shell.plug.providesOutput( ):
                # follow the plugs that affect this output
                addToStack( shell.node, stack, shell.plug.affectedBy(), branch_first )
            # END if provides output

            # follow the incoming connection, if there is one
            ishell = shell.input( )
            if ishell:
                if branch_first:
                    stack.append( ishell )
                else:
                    stack.appendleft( ishell )
            # END has input connection
        # END upstream
        else:
            # could also be connected - follow them
            if branch_first:
                # first the outputs, then the internals ( this ends up with the same effect )
                addToStack( shell.node, stack, shell.plug.affected(), branch_first )
                addOutputToStack( stack, shell.outputs(), branch_first )
            else:
                addOutputToStack( stack, shell.outputs(), branch_first )
                addToStack( shell.node, stack, shell.plug.affected(), branch_first )
            # END branch first handling
        # END downstream
    # END for each shell on work stack

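
# Illustrative usage sketch only ( not part of the original module ): iterating the
# plugs that feed a node's output shell could look like this - the node instance
# 'node' and its plug 'outResult' are hypothetical placeholders.
#
#   for shell in iterShells( node.outResult, direction = "up", visit_once = True ):
#       print shell
#
# prune and stopAt receive the current shell and return True to skip it or to cut
# the traversal off at that point.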

#{ Plugs and Attributes

class Attribute( object ):
    """Simple class defining the type of a plug and several flags that
    affect it. Additionally it can determine how well suited another attribute is to it.

    **Flags**

    exact_type:
        if True, derived classes of our typecls are not considered to be a valid type.
        i.e: a basestring could be stored in a str attr if exact_type is False - it is less
        than we need, but it still gets a reduced, non-zero rating.
        Putting a str into a basestring attribute will always work though, as it would be more than we need.

    readonly:
        if True, the attribute's plug cannot be written to. Read-only attributes can be used
        as storage that the user can read, but not write.
        You can write read-only plugs by directly setting their cache - this, of course, is only
        for the node itself and will never be done by the framework.

    computable:
        Plugs are automatically computable if they are affected by another plug.
        If this is not the case, they are marked input only and are not computed.
        If this flag is True, even unaffected plugs are computable.
        Plugs that affect something are automatically input plugs and will not be computed.
        If the plug does not affect anything and this flag is False, they are seen as input plugs.
        The system does not allow plugs to be input and output plugs at the same time, thus your compute
        cannot be triggered by your own compute.

    cls:
        if True, the plug requires classes to be set ( instances of 'type' ), but no instances of these classes.

    uncached:
        if False, computed values may be cached, otherwise they will always be recomputed.

    unconnectable:
        if True, the plug cannot be the destination of a connection.

    check_passing_values:
        check each value as it flows through a connection - usually compatibility is only checked
        on connection and once values are set, but not if they flow through an existing connection.

    **Default Values**

    Although default values can be simple primitives or classes, a callable is specifically supported.
    It allows you to get a callback whenever a default value is required.
    The same result could be achieved by connecting the plug in question, but dynamic defaults are a
    quicker way to achieve that.
    Your returned value will be type-checked against the required type if check_passing_values is set."""
    kNo, kGood, kPerfect = 0, 127, 255        # specify how good attributes fit together
    exact_type, readonly, computable, cls, uncached, unconnectable, check_passing_values = ( 1, 2, 4, 8, 16, 32, 64 )

    def __init__( self, typeClass, flags, default = None ):
        self.typecls = typeClass
        self.flags = flags                    # used for bitflags describing mode
        self._default = default

        # check default value for compatibility !
        if default is not None:
            if not hasattr( default, '__call__' ) and self.compatabilityRate( default ) == 0:
                raise TypeError( "Default value %r is not compatible with this attribute" % default )
        # END default type check

    def _getClassRating( self, cls, exact_type ):
        """Compute the class rating

        :return: rating based on value being a class and compare.
            0 means there is no type compatibility, 255 means cls matches the stored typecls,
            or linearly less if it is just part of the mro of value"""
        if not isinstance( cls, type ):
            return 0

        mro = self.typecls.mro()
        mro.reverse()

        if not cls in mro:
            # if we are in the classes mro, then we can perfectly store the class
            # as it is more than we need
            if not exact_type and self.typecls in cls.mro():
                return self.kPerfect
            return 0
        # END simple mro checking

        if len( mro ) == 1:
            return self.kPerfect

        rate = ( float( mro.index( cls ) ) / float( len( mro ) - 1 ) ) * self.kPerfect

        if exact_type and rate != self.kPerfect:        # exact type check
            return 0

        return rate

    def affinity( self, otherattr ):
        """Compute affinity for otherattr.

        :return: rating from 0 to 255 defining how good the attributes match each
            other in general - how well can we store values of otherattr ?
            Thus this comparison is directed.

        :note: for checking connections, use `connectionAffinity`"""
        # see whether our class flags match
        if self.flags & self.cls != otherattr.flags & self.cls:
            return 0

        # DEFAULT VALUE CHECK
        #######################
        # see whether the destination can handle our default value - if not,
        # just go for a class comparison
        try:
            defvalue = otherattr.default()
            rate = self.compatabilityRate( defvalue )
        except ( MissingDefaultValueError, TypeError ):
            # finally check how good our types match
            rate = self._getClassRating( otherattr.typecls, self.flags & self.exact_type )
        # END default value check

        return rate

    def connectionAffinity( self, destinationattr ):
        """Compute connection affinity for the given destination attribute

        :return: rating from 0 to 255 defining the quality of the connection to
            otherplug. An affinity of 0 means the connection is not possible, 255 means the
            connection is perfectly suited.
            The connection is a directed one from self -> otherplug"""
        if destinationattr.flags & self.unconnectable:        # destination must be connectable
            return 0

        # how well can the destination attr deal with us ?
        return destinationattr.affinity( self )

    def compatabilityRate( self, value ):
        """Compute the value's compatibility rate

        :return: value between 0 and 255, 0 means no compatibility, 255 a perfect match.
            if larger than 0, the plug can hold the value ( assuming the flags are set correctly )."""
        if isinstance( value, type ):
            # do we need a class ?
            if not self.flags & self.cls:
                return 0        # it is a class, but we want instances

            # check compatibility
            return self._getClassRating( value, self.flags & self.exact_type )
        # END is class type
        else:
            if not self.flags & self.cls:
                return self._getClassRating( value.__class__, self.flags & self.exact_type )
        # END is instance type

        return 0

    def default( self ):
        """:return: default value stored for this attribute, or raise
        :note: handles dynamic defaults, so you should not directly access the default member variable
        :raise MissingDefaultValueError: if attribute does not have a default value
        :raise TypeError: if value returned by dynamic attribute has incorrect type"""
        if self._default is None:
            raise MissingDefaultValueError( "Attribute %r has no default value" % self )

        # DYNAMIC DEFAULTS
        ######################
        if hasattr( self._default, '__call__' ):
            default = self._default()
            if self.flags & self.check_passing_values and self.compatabilityRate( default ) == 0:
                raise TypeError( "Dynamic default value had incorrect type: %s" % type( default ) )
            return default
        # END dynamic default handling

        # normal static default
        return self._default
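
# Illustrative sketch only ( not part of the original module ): attributes are
# created by OR-ing the flag bits defined above; the variable names below are
# hypothetical.
#
#   floatIn  = Attribute( float, 0, default = 1.0 )        # writable, cached input
#   floatOut = Attribute( float, Attribute.computable | Attribute.uncached )
#   assert floatIn.compatabilityRate( 2.5 ) == Attribute.kPerfect
#   assert floatIn.compatabilityRate( "not a float" ) == Attribute.kNo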

class iPlug( object ):
    """Defines an interface allowing to compare compatibilities according to types.

    Plugs can either be input plugs or output plugs - output plugs affect no other
    plug on a node, but are affected by 0 or more plugs.

    By convention, a plug has a name - that name must also be the name of the
    member attribute that stores the plug. Plugs, possibly different instances of it,
    need to be re-retrieved on freshly duplicated nodes to allow graph duplication to
    work properly.

    :note: if your plug class supports the ``setName`` method, a metaclass will
        adjust the name of your plug to match the name it has in the parent class"""
    kNo,kGood,kPerfect = ( 0, 127, 255 )

    #{ Base Implementation
    #} END base implementation

    def name( self ):
        """:return: name of the plug ( the name that identifies it on the node )"""
        raise NotImplementedError( "Implement this in subclass" )

    def affects( self, otherplug ):
        """Set an affects relationship between this plug and otherplug, saying
        that this plug affects otherplug."""
        raise NotImplementedError( "Implement this in subclass" )

    def affected( self ):
        """:return: tuple containing affected plugs ( plugs that are affected by our value )"""
        raise NotImplementedError( "Implement this in subclass" )

    def affectedBy( self ):
        """:return: tuple containing plugs that affect us ( plugs affecting our value )"""
        raise NotImplementedError( "Implement this in subclass" )

    def providesOutput( self ):
        """:return: True if this is an output plug that can trigger computations"""
        raise NotImplementedError( "Implement this in subclass" )

    def providesInput( self ):
        """:return: True if this is an input plug that will never cause computations"""
        raise NotImplementedError( "Implement this in subclass" )

class plug( iPlug ):
    """Defines an interface allowing to compare compatibilities according to types.

    Plugs are implemented as descriptors, thus they will be defined on node class
    level, and all static information will remain static.

    As descriptors, they are defined statically on the class, and some additional information,
    such as connectivity, is stored on the respective node instance. These special methods
    are handled using the `NodeBase` class.

    Plugs are implemented as descriptors as all type information can be kept per class,
    whereas only connection information changes per node instance.

    Plugs can either be input plugs or output plugs - output plugs affect no other
    plug on a node, but are affected by 0 or more plugs.

    :note: class is lowercase as it is used as descriptor ( acting more like a function )"""
    kNo,kGood,kPerfect = ( 0, 127, 255 )

    #{ Overridden object methods
    def __init__( self, attribute ):
        """Initialize the plug with the given attribute"""
        self._name = None
        self.attr = attribute
        self._affects = list()            # list of plugs that are affected by us
        self._affectedBy = list()         # keeps record of all plugs that affect us

    #} END object overridden methods

    def __get__( self, obj, cls=None ):
        """A value has been requested - return our plugshell that brings together
        both, the object and the static plug"""
        # in instance mode we return a shell for the accessing object
        if obj is not None:
            return obj.toShell( self )

        # class attributes just return the descriptor itself for direct access
        return self

    #def __set__( self, obj, value ):
    #    """We do not use a set method, allowing to override our descriptor through
    #    actual plug instances in the instance dict. Once deleted, we shine through again"""
    #    # raise AssertionError( "To set this value, use the node.plug.set( value ) syntax" )
    #    # obj.toShell( self ).set( value )

    def name( self ):
        """:return: name of plug"""
        return self._name

    def setName( self, name ):
        """Set the name of this plug - can be set only once"""
        if self._name is None:
            self._name = name
        else:
            raise ValueError( "The name of the plug can only be set once" )

    def affects( self, otherplug ):
        """Set an affects relationship between this plug and otherplug, saying
        that this plug affects otherplug."""
        if otherplug not in self._affects:
            self._affects.append( otherplug )

        if self not in otherplug._affectedBy:
            otherplug._affectedBy.append( self )

    def affected( self ):
        """:return: tuple containing affected plugs ( plugs that are affected by our value )"""
        return tuple( self._affects )

    def affectedBy( self ):
        """:return: tuple containing plugs that affect us ( plugs affecting our value )"""
        return tuple( self._affectedBy )

    def providesOutput( self ):
        """:return: True if this is an output plug that can trigger computations"""
        return bool( len( self.affectedBy() ) != 0 or self.attr.flags & Attribute.computable )

    def providesInput( self ):
        """:return: True if this is an input plug that will never cause computations"""
        #return len( self._affects ) != 0 and not self.providesOutput( )
        return not self.providesOutput()        # previous version did not recognize storage plugs as input

#} END plugs and attributes
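
# Illustrative sketch only ( not part of the original module ): plugs are declared
# as class members wrapping an Attribute, and 'affects' wires inputs to outputs.
# NodeBase and its metaclass are defined further below; all names here are
# hypothetical.
#
#   class AddNode( NodeBase ):
#       inFloat1  = plug( Attribute( float, 0, default = 0.0 ) )
#       inFloat2  = plug( Attribute( float, 0, default = 0.0 ) )
#       outResult = plug( Attribute( float, Attribute.uncached ) )
#
#       inFloat1.affects( outResult )
#       inFloat2.affects( outResult )
#
# The metaclass calls setName on each plug so its name matches the member name.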

class _PlugShell( tuple ):
    """Handles per-node-instance plug connection setup and storage. As plugs are
    descriptors and thus an instance of the class, per-node-instance information needs
    to be stored separately.

    This class is being returned whenever the descriptor's get and set methods are called.
    It contains information about the node and the plug being involved, allowing to track
    connection info directly using the node dict.

    This allows plugs to be connected, and information to flow through the dependency graph.
    Plugs never act alone since they always belong to a parent node that will be asked for
    value computations if the value is not yet cached.

    :note: Do not instantiate this class yourself, it must be created by the node as different
        node types can use different versions of this shell"""

    #{ Object Overrides
    def __new__( cls, *args ):
        return tuple.__new__( cls, args )

    def __getattr__( self, attr ):
        """Allow easy attribute access while staying memory efficient"""
        if attr == 'node':
            return self[0]
        if attr == 'plug':
            return self[1]

        # let it raise the typical error
        return super( _PlugShell, self ).__getattribute__( attr )

    def __repr__ ( self ):
        return "%s.%s" % ( self.node, self.plug )

    #} END object overrides

    def get( self, mode = None ):
        """:return: value of the plug
        :param mode: optional arbitrary value specifying the mode of the get attempt"""
        if self.hasCache( ):
            return self.cache( )

        # Output plugs compute values
        if self.plug.providesOutput( ):
            # otherwise compute the value
            try:
                result = self.node.compute( self.plug, mode )
            except ComputeError,e:
                raise ComputeError( "%s->%s" % ( repr( self ), str( e ) ) )
            except Exception:        # except all - this is an unknown exception - just pass it on, keeping the origin
                raise

            # the compute method must deliver a value
            if result is None:
                raise AssertionError( "Plug %s returned None - check your node implementation" % ( str( self ) ) )

            # try to cache computed values
            self.setCache( result )
            return result
        # END plug provides output
        elif self.plug.providesInput( ):        # has to be separately checked
            # check for connection
            inputshell = self.input()
            if not inputshell:
                # check for default value
                try:
                    return self.plug.attr.default()
                except ( TypeError, MissingDefaultValueError ),e:
                    raise MissingDefaultValueError( "Plug %r failed to retrieve its default value and is not connected" % repr( self ), e )
            # END if we have no input

            # query the connected plug for the value
            value = inputshell.get( mode )
            if self.plug.attr.flags & Attribute.check_passing_values:
                if not self.plug.attr.compatabilityRate( value ):
                    raise TypeError( "Value coming from input %s is not compatible with %s" % ( str( inputshell ), str( self ) ) )

            return value
        # END plug provides input

        raise AssertionError( "Plug %s did not provide any output or input!" % repr( self ) )

    def set( self, value, ignore_connection = False ):
        """Set the given value to be used in our plug

        :param ignore_connection: if True, the plug can be destination of a connection and
            will still get its value set - usually it would be overwritten by the value from the
            connection. The set value will be cleared if something upstream in its connection chain
            changes.
        :raise AssertionError: the respective attribute must be cached, otherwise
            the value will be lost"""
        flags = self.plug.attr.flags
        if flags & Attribute.readonly:
            raise NotWritableError( "Plug %r is not writable" % repr(self) )

        if self.plug.providesOutput( ):
            raise NotWritableError( "Plug %r is not writable as it provides an output itself" % repr(self) )

        if flags & Attribute.uncached:
            raise AssertionError( "Writable attributes must be cached - otherwise the value will not be held" )

        # check for an existing input connection
        if not ignore_connection and self.input() is not None:
            raise NotWritableError( "Plug %r is connected to %r and thus not explicitly writable" % ( self, self.input() ) )

        self.setCache( value )

    def compatabilityRate( self, value ):
        """Compute the compatibility rate for the given value

        :return: value between 0 and 255, 0 means no compatibility, 255 a perfect match
            if larger than 0, the plug can hold the value ( assuming the flags are set correctly )"""
        return self.plug.attr.compatabilityRate( value )

    def connect( self, otherplugshell, **kwargs ):
        """Connect this plug to otherplugshell such that otherplugshell is an input plug for our output

        :param kwargs: everything supported by `Graph.connect`
        :return: self on success, allows chained connections
        :raise PlugAlreadyConnected: if otherplugshell is connected and force is False
        :raise PlugIncompatible: if otherplugshell does not appear to be compatible to this one"""
        if not isinstance( otherplugshell, _PlugShell ):
            raise AssertionError( "Invalid type given to connect: %r" % repr( otherplugshell ) )

        return self.node.graph.connect( self, otherplugshell, **kwargs )

    def disconnect( self, otherplugshell ):
        """Remove the connection to otherplugshell if we are connected to it.
        :note: does not raise if no connection is present"""
        if not isinstance( otherplugshell, _PlugShell ):
            raise AssertionError( "Invalid type given to disconnect: %r" % repr( otherplugshell ) )

        return self.node.graph.disconnect( self, otherplugshell )

    def input( self, predicate = lambda shell: True ):
        """:return: the connected input plug or None if there is no such connection
        :param predicate: plug will only be returned if predicate is true for it
        :note: input plugs have at most one connected plug, output plugs can have more than one"""
        sourceshell = self.node.graph.input( self )
        if sourceshell and predicate( sourceshell ):
            return sourceshell

        return None

    def outputs( self, predicate = lambda shell: True ):
        """:return: a list of plugs being the destination of the connection
        :param predicate: plug will only be returned if predicate is true for it - shells will be passed in"""
        return self.node.graph.outputs( self, predicate = predicate )

    def connections( self, inpt, output, predicate = lambda shell: True ):
        """:return: get all input and or output connections from this shell
            or to this shell as edges ( sourceshell, destinationshell )
        :param predicate: return true for each destination shell that you can accept in the
            returned edge or the sourceshell where your shell is the destination.
        :note: Use this method to get edges ready for connection/disconnection"""
        outcons = list()
        if inpt:
            sourceshell = self.input( predicate = predicate )
            if sourceshell:
                outcons.append( ( sourceshell, self ) )
        # END input connection handling

        if output:
            outcons.extend( ( self, oshell ) for oshell in self.outputs( predicate = predicate ) )
        # END output connection handling

        return outcons

    def isConnected( self ):
        """:return: True, if the shell is connected as source or as destination of a connection"""
        return self.input() or self.outputs()

    def iterShells( self, **kwargs ):
        """Iterate plugs and their connections starting at this plug
        :return: generator for plug shells
        :note: supports all options of `iterShells`, this method allows syntax like:
            node.outAttribute.iterShells( )"""
        return iterShells( self, **kwargs )

    def _cachename( self ):
        return self.plug.name() + "_c"

    def hasCache( self ):
        """:return: True if we currently store a cached value"""
        return hasattr( self.node, self._cachename() )

    def setCache( self, value ):
        """Set the given value to be stored in our cache
        :raise: TypeError if the value is not compatible to our defined type"""
        # attr compatibility - always run this as we want to be warned if the compute
        # method returns a value that does not match
        if self.plug.attr.compatabilityRate( value ) == 0:
            raise TypeError( "Plug %r cannot hold value %r as it is not compatible" % ( repr( self ), repr( value ) ) )

        # skip storage if the attribute does not want its values cached
        if self.plug.attr.flags & Attribute.uncached:
            return

        # our cache changed - dirty downstream plugs - thus clear the cache
        # NOTE: this clears our own cache by deleting it, but we re-set it
        self.clearCache( clear_affected = True )
        setattr( self.node, self._cachename(), value )

    def cache( self ):
        """:return: the cached value or raise
        :raise ValueError: if no cached value exists"""
        if self.hasCache():
            return getattr( self.node, self._cachename() )

        raise ValueError( "Plug %r did not have a cached value" % repr( self ) )

    def clearCache( self, clear_affected = False, cleared_shells_set = None ):
        """Empty the cache of our plug

        :param clear_affected: if True, the caches of our affected plugs ( connections
            or affects relations ) will also be cleared
            This operation is recursive, and needs to be as different shells on different nodes
            might do things differently.
        :param cleared_shells_set: if set, it can be used to track which plugs have already been dirtied to
            prevent recursive loops
            Propagation will happen even if we do not have a cache to clear ourselves"""
        if self.hasCache():
            del( self.node.__dict__[ self._cachename() ] )

        if clear_affected:
            # our cache changed - dirty downstream plugs - thus clear the cache
            if not cleared_shells_set:        # initialize our tracking list
                cleared_shells_set = set()

            if self in cleared_shells_set:
                return

            cleared_shells_set.add( self )        # assure we do not come here twice

            all_shells = itertools.chain( self.node.toShells( self.plug.affected() ), self.outputs() )
            for shell in all_shells:
                shell.clearCache( clear_affected = True, cleared_shells_set = cleared_shells_set )
            # END for each shell in all_shells to clear
        # END clear affected handling

    #{ Name Overrides
    __rshift__ = lambda self,other: self.connect( other, force=True )

    # NOTE: this will cause problems when sorting them :) - so lets just use >> for the
    # forced connection !
    # __gt__ = lambda self,other: self.connect( other, force=False )

    #} END name overrides
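
# Illustrative sketch only ( not part of the original module ): shells returned by
# node.plugname support value access and connections. The node instances 'n' and
# 'other' and the plug names are hypothetical.
#
#   n.inFloat1.set( 1.0 )              # writes the cache of the input plug
#   value = n.outResult.get()          # pulls through connections or computes
#   n.outResult >> other.inFloat1      # same as n.outResult.connect( other.inFloat1, force = True )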

class Graph( nx.DiGraph, iDuplicatable ):
    """Holds the nodes and their connections

    Nodes are kept in a separate list whereas the plug connections are kept
    in the underlying DiGraph"""

    #{ Overridden Object Methods
    def __init__( self, **kwargs ):
        """Initialize the DiGraph and add some additional attributes"""
        super( Graph, self ).__init__( **kwargs )
        self._nodes = set()        # our processes from which we can make connections

    def __del__( self ):
        """Clear our graph"""
        self.clear()               # clear connections

        # NOTE : nodes will remove themselves once they are not referenced anymore

    def __getattr__( self, attr ):
        """Allows access to nodes by name just by accessing the graph directly"""
        try:
            return self.nodeByID( attr )
        except NameError:
            return super( Graph, self ).__getattribute__( attr )

    #} END object methods

    def writeDot( self, fileOrPath ):
        """Write the connections in self to the given file object or path

        :todo: remove if no longer needed"""
        # associate every plugshell with its node to create a more native look
        writegraph = nx.DiGraph()
        # but we do not use it as the edge attrs cannot be assigned anymore - dict has no unique keys
        # writegraph.allow_multiedges()

        # add all nodes
        for node in self.iterNodes():
            writegraph.add_node( node, color="#ebba66", width=4, height=2, fontsize=22 )
        # END for each node in graph

        # now all the connections - just transfer them
        for sshell,eshell in self.edges_iter():
            writegraph.add_edge( sshell,eshell )

            writegraph.add_edge( sshell.node, sshell, color="#000000" )

            writegraph.add_node( sshell, color="#000000", label=sshell.plug )
            writegraph.add_node( eshell, color="#000000", label=eshell.plug )

            writegraph.add_edge( eshell,eshell.node, color="#000000" )
        # END for each edge in graph

        nx.write_dot(writegraph, fileOrPath)

    #{ iDuplicatable Interface
    def createInstance( self ):
        """Create a copy of self and return it"""
        return self.__class__( )

    def copyFrom( self, other ):
        """Duplicate all data from other graph into this one, create a duplicate
        of the nodes as well"""
        def copyshell( shell, nodemap ):
            nodecpy = nodemap[ shell.node ]

            # nodecpy - just get the shell of the given name directly - getattr always creates
            # shells as it is equal to node.plugname
            return getattr( nodecpy, shell.plug.name() )

        # copy name ( networkx )
        self.name = other.name

        # duplicate all nodes
        nodemap = dict()
        for node in other.iterNodes():
            nodecpy = node.duplicate( add_to_graph = False )        # copy node
            nodemap[ node ] = nodecpy
        # END for each node

        # add all nodemap values as nodes ( now that iteration is done )
        for duplnode in nodemap.itervalues():
            self.addNode( duplnode )
        # END for each duplicated node

        # recreate the connections
        for sshell,eshell in other.edges_iter():
            # make fresh connections through shells - we do not know what kind of
            # plugs they use, so they could be special and thus need special
            # treatment
            cstart = copyshell( sshell, nodemap )
            cend = copyshell( eshell, nodemap )

            cstart.connect( cend )
        # END for each edge( startshell, endshell )

    #} END iDuplicatable

    def addNode( self, node ):
        """Add a new node instance to the graph

        :note: node membership is exclusive, thus node instances
            can only be in one graph at a time
        :return: self, for chained calls"""
        if not isinstance( node, NodeBase ):
            raise TypeError( "Node %r must be of type NodeBase" % node )

        # assure we do not remove ( and kill connections ) and re-add to ourselves
        if node in self._nodes:
            return self

        # remove node from existing graph
        if node.graph is not None:
            node.graph.removeNode( node )

        self._nodes.add( node )        # assure the node knows us
        node.graph = weakref.proxy( self )

        return self        # assure we have the graph set

    def removeNode( self, node ):
        """Remove the given node from the graph ( if it exists in it )"""
        if node not in self._nodes:
            return

        # drop all connections of the node first
        for sshell, eshell in node.connections( 1, 1 ):
            self.disconnect( sshell, eshell )

        # assure the node does not call us anymore
        node.graph = None
        self._nodes.remove( node )

    def clearCache( self ):
        """Clear the cache of all nodes in the graph - this forces the graph
        to reevaluate on the next request"""
        for node in self._nodes:
            node.clearCache()

    def hasNode( self, node ):
        """:return: True if the node is in this graph, false otherwise"""
        return node in self._nodes

    def iterNodes( self, predicate = lambda node: True ):
        """:return: generator returning all nodes in this graph
        :param predicate: if True for node, it will be returned
        :note: there is no particular order"""
        for node in self._nodes:
            if predicate( node ):
                yield node
        # END for each node

    def iterConnectedNodes( self, predicate = lambda node: True ):
        """:return: generator returning all nodes that are connected in this graph,
            in no particular order.
            For an ordered iteration, use `iterShells`.

        :param predicate: if True for node, it will be returned"""
        # iterate digraph keeping the plugs only ( and thus connected nodes )
        nodes_seen = set()
        for node,plug in self.nodes_iter():
            if node in nodes_seen:
                continue
            nodes_seen.add( node )
            if predicate( node ):
                yield node
        # END for each node

    def nodes( self ):
        """:return: immutable copy of the nodes used in the graph"""
        return tuple( self._nodes )

    def numNodes( self ):
        """:return: number of nodes in the graph"""
        return len( self._nodes )

    def nodeByID( self, nodeID ):
        """:return: instance of a node according to the given node id
        :raise NameError: if no such node exists in graph"""
        for node in self.iterNodes():
            if node.id() == nodeID:
                return node
        # END for each node

        raise NameError( "Node with ID %s not found in graph" % nodeID )
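
    # Illustrative sketch only ( not part of the original module ): nodes carrying
    # an id can be looked up by it, and __getattr__ above makes them reachable as
    # graph attributes. The id "adder" is a hypothetical example.
    #
    #   g.nodeByID( "adder" )        # raises NameError if no such node exists
    #   g.adder                      # equivalent, via __getattr__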

    def connect( self, sourceshell, destinationshell, force = False ):
        """Connect this plug to destinationshell such that destinationshell is an input plug for our output

        :param sourceshell: PlugShell being source of the connection
        :param destinationshell: PlugShell being destination of the connection
        :param force: if False, existing connections to destinationshell will not be broken, but an exception is raised
            if True, existing connections may be broken
        :return: sourceshell on success, allows chained connections
        :raise PlugAlreadyConnected: if destinationshell is connected and force is False
        :raise PlugIncompatible: if destinationshell does not appear to be compatible to this one"""
        # assure both nodes are known to the graph
        if sourceshell.node.graph is not destinationshell.node.graph:
            raise AssertionError( "You cannot connect nodes from different graphs" )

        self._nodes.add( sourceshell.node )
        self._nodes.add( destinationshell.node )

        # check compatibility
        if sourceshell.plug.attr.connectionAffinity( destinationshell.plug.attr ) == 0:
            raise PlugIncompatible( "Cannot connect %r to %r as they are incompatible" % ( repr( sourceshell ), repr( destinationshell ) ) )

        oinput = destinationshell.input( )
        if oinput is not None:
            if oinput == sourceshell:
                return sourceshell

            if not force:
                raise PlugAlreadyConnected( "Cannot connect %r to %r as it is already connected" % ( repr( sourceshell ), repr( destinationshell ) ) )

            # break the existing connection
            oinput.disconnect( destinationshell )
        # END destinationshell already connected

        self.add_edge( sourceshell, v = destinationshell )
        return sourceshell
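
    # Illustrative sketch only ( not part of the original module ): connections are
    # usually made through shells, which delegate here; 'force' controls whether an
    # existing input connection may be broken. Names are hypothetical.
    #
    #   g.connect( n.outResult, m.inFloat1 )                  # raises PlugAlreadyConnected if m.inFloat1 is taken
    #   g.connect( n.outResult, m.inFloat1, force = True )    # breaks the existing connection first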

    def disconnect( self, sourceshell, destinationshell ):
        """Remove the connection between sourceshell and destinationshell if they are connected
        :note: does not raise if no connection is present"""
        self.remove_edge( sourceshell, v = destinationshell )

        # also, delete the plugshells if they are not connected elsewhere
        for shell in sourceshell,destinationshell:
            if len( self.neighbors( shell ) ) == 0:
                self.remove_node( shell )
        # END for each shell

    def input( self, plugshell ):
        """:return: the connected input plug of plugshell or None if there is no such connection
        :note: input plugs have at most one connected plug, output plugs can have more than one connected plug"""
        try:
            pred = self.predecessors( plugshell )
            if pred:
                return pred[0]
        except nx.NetworkXError:
            pass

        return None

    def outputs( self, plugshell, predicate = lambda x : True ):
        """:return: a list of plugs being the destination of the connection to plugshell
        :param predicate: plug will only be returned if predicate is true for it - shells will be passed in"""
        try:
            return [ s for s in self.successors( plugshell ) if predicate( s ) ]
        except nx.NetworkXError:
            return list()

class _NodeBaseCheckMeta( type ):
    """Class checking the consistency of the nodebase class before it is being created"""
    def __new__( metacls, name, bases, clsdict ):
        """Check the consistency of the new class:

        - every plugname must correspond to a node member name
        """
        newcls = super( _NodeBaseCheckMeta, metacls ).__new__( metacls, name, bases, clsdict )

        # EVERY PLUG NAME MUST MATCH WITH THE ACTUAL NAME IN THE CLASS
        # set the name according to its slot name in the parent class
        membersdict = inspect.getmembers( newcls )        # do not filter, as plugs could be overridden
        try:
            if hasattr( newcls, "plugsStatic" ):
                for plug in newcls.plugsStatic( ):
                    for name,member in membersdict:
                        if member == plug and plug.name() != name:
                            # try to adjust the name
                            if hasattr( plug, 'setName' ):
                                plug.setName( name )
                            else:
                                raise AssertionError( "Plug %r is named %s, but must be named %s as in its class %s" % ( plug, plug.name(), name, newcls ) )
                            # END setName special handling
                        # END if member name is wrong
                    # END for each class member
                    # ignore plugs we possibly did not find in the physical class
                # END for each plug in class
            # END if method exists
        except TypeError:
            # it can be that a subclass overrides this method and makes it an instance method
            # this is valid - the rest of the dgengine always accesses this method
            # through an instance - so we have to handle it gracefully
            pass

        return newcls

class NodeBase( iDuplicatable ):
    """Base class that provides plug support to its subclasses.
    It will create some simple tracking attributes required for the plug system
    to work.

    Nodes can compute values of their plugs if these do not have a cache.

    Nodes are identified by an ID - the default graph implementation though will
    be okay with just having instances.
    It is also being used for string representations of this node"""
    shellcls = _PlugShell                        # class used to instantiate new shells
    __metaclass__ = _NodeBaseCheckMeta           # check the class before it is being created

    #{ Overridden from Object
    def __init__( self, *args, **kwargs ):
        """We require a directed graph to track the connectivity between the plugs.
        It must be supplied by the super class and should be as global as required to
        connect the NodeBases together properly.

        :param kwargs: 'id' = id of the instance, defaults to None if it is not required
        :note: we are super() compatible, and assure our base is initialized correctly"""
        self.graph = None
        self._id = None

        newid = kwargs.get( 'id', None )
        if newid:
            self.setID( newid )

    def __del__( self ):
        """Remove ourselves from the graph and delete our connections"""
        # check if item does still exist - this is not the case if the graph
        # is currently being deleted
        try:
            #self.graph.removeNode( self )        # TODO: take back in and make it work ! Problems with facade nodes
            pass
        except (AttributeError,ReferenceError):        # .graph could be None
            pass

    def __str__( self ):
        """Use our id as string or the default implementation"""
        if self.id() is not None:
            return str( self.id() )

        return super( NodeBase, self ).__str__( )

    #} Overridden from Object

    #{ iDuplicatable Interface
    def createInstance( self, *args, **kwargs ):
        """Create a copy of self and return it

        :note: override by subclass - the __init__ methods should do the rest"""
        return self.__class__( id = self.id() )

    def copyFrom( self, other, add_to_graph = True ):
        """Just take the graph from other, but do not ( never ) duplicate it

        :param add_to_graph: if true, the new node instance will be added to the graph of
            the other node
        :note: default implementation does not copy plug caches ( which are stored in
            the node dict ) - this is because a reevaluation is usually required on the
            duplicated node"""
        self.setID( other.id() )        # id copying would create equally named clones for now
        if add_to_graph and other.graph:        # add ourselves to the graph of the other node
            other.graph.addNode( self )

    #} END iDuplicatable

    #{ Base Interface
    def compute( self, plug, mode ):
        """Called whenever a plug needs computation because its value is not
        cached or is marked dirty ( as one of the inputs changed )

        :param plug: the static plug instance that requested the computation.
            It is the instance you defined on the class
        :param mode: the mode of operation. It is completely up to the subclass how that
            attribute is going to be used
        :note: to be implemented by subclass"""
        raise NotImplementedError( "To be implemented by subclass" )

    #} END base interface
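
    # Illustrative sketch only ( not part of the original module ): continuing the
    # hypothetical AddNode from the plug section above, its compute method could
    # look like this.
    #
    #   def compute( self, plug, mode ):
    #       if plug is self.__class__.outResult:
    #           return self.inFloat1.get() + self.inFloat2.get()
    #       raise PlugUnhandled( "Plug %r is not handled by this node" % plug )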

    def setID( self, newID ):
        """Set the id of this node to newID
        :return: previously assigned id"""
        curid = self.id()
        self._id = newID
        return curid

    def id( self ):
        """:return: ID of this instance"""
        return self._id

    def toShells( self, plugs ):
        """:return: list of shells made from plugs and our node"""
        # may not use it as generator as it binds variables ( of course ! )
        outlist = list()
        for plug in plugs:
            outlist.append( self.toShell( plug ) )
        # END for each plug
        return outlist

    def toShell( self, plug ):
        """:return: a plugshell suitable for this class"""
        return getattr( self, 'shellcls' )( self, plug )        # prevent the class variable from being bound !

    def clearCache( self ):
        """Clear the cache of all plugs on this node - this basically forces it
        to recompute the next time an output plug is being queried"""
        for plug in self.plugs( ):
            self.toShell( plug ).clearCache( clear_affected = False )

    @classmethod
    def plugsStatic( cls, predicate = lambda x: True ):
        """:return: list of static plugs as defined on this node - they are class members

        :param predicate: return static plug only if predicate is true
        :note: Use this method only if you do not have an instance - there are nodes
            that actually have no static plug information, but will dynamically generate them.
            For this to work, they need an instance - thus the plugs method is an instance
            method and is meant to be the most commonly used one."""
        pred = lambda m: isinstance( m, plug )

        pluggen = ( m[1] for m in inspect.getmembers( cls, predicate = pred ) if predicate( m[1] ) )
        return list( pluggen )

    def plugs( self, predicate = lambda x: True ):
        """:return: list of dynamic plugs as defined on this node - they are usually retrieved
            on class level, but may be overridden on instance level

        :param predicate: return plug only if predicate is true"""
        # the getmembers function appears to be ... buggy with my classes
        # use special handling to assure we get all the instance members AND the class members
        # In ipython tests this worked as expected - get the dicts individually
        all_dict_holders = itertools.chain( ( self, ), self.__class__.mro() )
        all_dicts = ( instance.__dict__ for instance in all_dict_holders )
        pluggen = ( v for d in all_dicts for v in d.itervalues() if isinstance( v, plug ) and predicate( v ) )

        return list( pluggen )

    @classmethod
    def inputPlugsStatic( cls, **kwargs ):
        """:return: list of static plugs suitable as input
        :note: convenience method"""
        return cls.plugsStatic( predicate = lambda p: p.providesInput(), **kwargs )

    def inputPlugs( self, **kwargs ):
        """:return: list of plugs suitable as input
        :note: convenience method"""
        return self.plugs( predicate = lambda p: p.providesInput(), **kwargs )

    @classmethod
    def outputPlugsStatic( cls, **kwargs ):
        """:return: list of static plugs suitable to deliver output
        :note: convenience method"""
        return cls.plugsStatic( predicate = lambda p: p.providesOutput(), **kwargs )

    def outputPlugs( self, **kwargs ):
        """:return: list of plugs suitable to deliver output
        :note: convenience method"""
        return self.plugs( predicate = lambda p: p.providesOutput(), **kwargs )

    def connections( self, inpt, output ):
        """:return: Tuples of input shells defining a connection of the given type from
            tuple( InputNodeOutputShell, OurNodeInputShell ) for input connections and
            tuple( OurNodeOutputShell, OutputNodeInputShell ) for output connections
        :param inpt: include input connections to this node
        :param output: include output connections ( from this node to others )"""
        outConnections = list()
        plugs = self.plugs()

        if inpt:
            shells = self.toShells( ( p for p in plugs if p.providesInput() ) )
            for shell in shells:
                ishell = shell.input( )
                if ishell:
                    outConnections.append( ( ishell, shell ) )
            # END for each shell in this node's shells
        # END input handling

        if output:
            shells = self.toShells( ( p for p in plugs if p.providesOutput() ) )
            for shell in shells:
                outConnections.extend( ( ( shell, oshell ) for oshell in shell.outputs() ) )
        # END output handling

        return outConnections

    @classmethod
    def filterCompatiblePlugs( cls, plugs, attrOrValue, raise_on_ambiguity = False, attr_affinity = False,
                               attr_as_source=True ):
        """:return: sorted list of (rate,plug) tuples suitable to deal with the given attribute.
            Thus they could connect to it as well as get their value set.
            Most suitable plug comes first.
            Incompatible plugs will be pruned.
        :param attrOrValue: either an attribute or the value you would like to set to the
            attr at the plug in question.
        :param raise_on_ambiguity: if True, the method raises if a plug has the same
            rating as another plug already on the output list, thus it's not clear anymore
            which plug should handle a request
        :param attr_affinity: if True, it will not check connection affinity, but attribute
            affinity only. It checks how compatible the attributes of the plugs are, disregarding
            whether they can be connected or not.
            Only valid if attrOrValue is an attribute
        :param attr_as_source: if True, attrOrValue will be treated as the source of a connection,
            from which each plug would need to take its values.
            if False, attrOrValue is the destination of a connection and it needs to take values of the given plugs
            or they would connect to it. Only used if attrOrValue is an attribute.
        :raise TypeError: if ambiguous input was found"""
        attribute = None
        value = None
        if isinstance( attrOrValue, Attribute ):
            attribute = attrOrValue
        else:
            value = attrOrValue

        outSorted = list()
        for plug in plugs:
            if attribute:
                sourceattr = attribute
                destinationattr = plug.attr
                if not attr_as_source:
                    destinationattr = attribute
                    sourceattr = plug.attr

                if attr_affinity:
                    rate = destinationattr.affinity( sourceattr )        # how good can dest store source ?
                else:
                    rate = sourceattr.connectionAffinity( destinationattr )
                # END which affinity type
            # END attribute rating
            else:
                rate = plug.attr.compatabilityRate( value )
            # END value rating

            if not rate:
                continue

            outSorted.append( ( rate, plug ) )
        # END for each plug

        outSorted.sort()
        outSorted.reverse()        # high rates first

        if raise_on_ambiguity:
            ratemap = dict()
            for rate,plug in outSorted:
                ratemap.setdefault( rate, list() ).append( plug )
            # END for each compatible plug

            report = ""
            for rate, pluglist in ratemap.iteritems( ):
                if len( pluglist ) > 1:
                    report += "Rate: %i :" % rate
                    for plug in pluglist:
                        report += "\n%s" % str(plug)
                    # END for each plug
                # END if ambiguous plugs
            # END for each rate in ratemap

            if report:
                report = "Ambiguous plugs found\n" + report
                raise TypeError( report )
        # END ambiguous check

        return outSorted
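
# Illustrative sketch only ( not part of the original module ): filterCompatiblePlugs
# helps picking a plug for a given value or attribute; the AddNode class is the
# hypothetical example used throughout these comments.
#
#   rated = AddNode.filterCompatiblePlugs( AddNode.inputPlugsStatic(), 3.0 )
#   best_rate, best_plug = rated[0]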