"""Contains a simple yet powerful dependency graph engine allowing computations
to be organized more efficiently.
"""
__docformat__ = "restructuredtext"

import networkx as nx
from collections import deque
import inspect
import weakref
import itertools
from util import iDuplicatable

__all__ = ("ConnectionError", "PlugIncompatible", "PlugAlreadyConnected", "AccessError",
           "NotWritableError", "NotReadableError", "MissingDefaultValueError", "ComputeError",
           "ComputeFailed", "PlugUnhandled",
           "iterShells", "Attribute", "iPlug", "plug", "Graph", "NodeBase")
25 """Exception base for all plug related errors"""
26
28 """Thrown whenever plugs are not compatible with each other during connection"""
29
31 """Thrown if one tries to connect a plug to otherplug when otherplug is already connected"""
32
34 """Base class for all errors indicating invalid access"""
35
37 """Thrown if a non-writable plug is being written to"""
38
40 """Thrown if a non-readable attribute is being read"""
41
43 """Thrown if a default value is missing for input attributes that are not connected"""
44
46 """Thrown if the computation done by a plug failed by an unknown exception
47 It will be passed on in the exception"""
48
50 """Raised by the derived class computing a value if the computational goal
51 cannot be achieved ( anymore )"""
52
54 """Raised if a plug was not handled by the node's compute method"""
55
56
57
58
59
60
61
62
63
def iterShells( rootPlugShell, stopAt = lambda x: False, prune = lambda x: False,
                direction = "up", visit_once = False, branch_first = False ):
    """Iterator starting at rootPlugShell going "up"stream ( input ) or "down"stream ( output )
    breadth first over plugs, applying filter functions as defined.

    :param rootPlugShell: shell at which to start the traversal. The root plug will be returned as well
    :param stopAt: if the function returns True for a given PlugShell, iteration will not proceed
        at that point ( possibly continuing at other spots ). The function will always be called, even
        if the shell would be pruned as well. The shell serving as stop marker will not be returned
    :param prune: if the function returns True for a given PlugShell, the shell will not be returned,
        but iteration continues.
    :param direction: traversal direction
        "up" upstream, in direction of the inputs of plugs
        "down" downstream, in direction of the outputs of plugs
    :param visit_once: if True, plugs will only be returned once, even though they are
        encountered several times as several nodes are connected to them in some way.
    :param branch_first: if True, individual branches will be travelled first ( thus the node will
        be left quickly, following the datastream ).
        If False, the plugs on the node will be returned first before proceeding to the next node"""
    visited = set()
    stack = deque()
    stack.append( rootPlugShell )

    def addToStack( node, stack, lst, branch_first ):
        if branch_first:
            stack.extend( node.toShell( plug ) for plug in lst )
        else:
            reviter = ( node.toShell( lst[i] ) for i in range( len( lst )-1,-1,-1) )
            stack.extendleft( reviter )

    def addOutputToStack( stack, lst, branch_first ):
        if branch_first:
            stack.extend( lst )
        else:
            stack.extendleft( reversed( lst[:] ) )

    while stack:
        shell = stack.pop()
        if shell in visited:
            continue

        if visit_once:
            visited.add( shell )

        if stopAt( shell ):
            continue

        if not prune( shell ):
            yield shell

        if direction == 'up':
            addToStack( shell.node, stack, shell.plug.affectedBy(), branch_first )

            ishell = shell.input( )
            if ishell:
                if branch_first:
                    stack.append( ishell )
                else:
                    stack.appendleft( ishell )
        else:
            if branch_first:
                addToStack( shell.node, stack, shell.plug.affected(), branch_first )
                addOutputToStack( stack, shell.outputs(), branch_first )
            else:
                addOutputToStack( stack, shell.outputs(), branch_first )
                addToStack( shell.node, stack, shell.plug.affected(), branch_first )


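# Example ( sketch, using the hypothetical AddNode from above ): visit every shell
# reachable downstream from an output exactly once, e.g. to inspect the dependency chain::
#
#     for shell in iterShells( node.outSum, direction = "down", visit_once = True ):
#         print shell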


class Attribute( object ):
    """Simple class defining the type of a plug and several flags that
    affect it. Additionally it can determine how well suited another attribute is.

    **Flags**:
        exact_type: if True, derived classes of our typecls are not considered to be a valid type.
            i.e: a basestring could be stored in a str attr if exact_type is False - it is less than
            we need, but still something.
            Putting a str into a basestring attribute will always work though, as it would be more
            than we need.
        readonly: if True, the attribute's plug cannot be written to. Read-only attributes can be used
            as storage that the user can read, but not write.
            You can write read-only plugs by directly setting their cache - this, of course, is only
            for the node itself, and will never be done by the framework.
        computable: Plugs are automatically computable if they are affected by another plug.
            If this is not the case, they are marked input only and are not computed.
            If this flag is True, even unaffected plugs are computable.
            Plugs that affect something are automatically input plugs and will not be computed.
            If the plug does not affect anything and this flag is False, they are seen as input plugs
            anyway.
            The system does not allow plugs to be input and output plugs at the same time, thus your
            compute cannot be triggered by your own compute.
        cls: if True, the plug requires classes to be set ( instances of 'type' ), but no instances
            of these classes.
        uncached: if False, computed values may be cached, otherwise they will always be recomputed.
        unconnectable: if True, the plug cannot be the destination of a connection.
        check_passing_values: check each value as it flows through a connection - usually compatibility
            is only checked on connection and once values are set, but not if they flow through an
            existing connection.

    **Default Values**:
        Although default values can be simple primitives or classes, a callable is specifically supported.
        It allows you to get a callback whenever a default value is required.
        The same result could be achieved by connecting the plug in question, but dynamic defaults are
        a quick way to achieve that.
        Your returned value will be type-checked against the required type if check_passing_values is set."""
    kNo, kGood, kPerfect = 0, 127, 255
    exact_type, readonly, computable, cls, uncached, unconnectable, check_passing_values = ( 1, 2, 4, 8, 16, 32, 64 )

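    # Illustrative examples ( sketch ) of typical attribute definitions::
    #
    #     Attribute( int, 0 )                                        # cached, writable int input
    #     Attribute( str, Attribute.readonly, default = "unnamed" )  # read-only with a static default
    #     Attribute( float, 0, default = lambda: 1.0 )               # dynamic ( callable ) default
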
    def __init__( self, typeClass, flags, default = None ):
        self.typecls = typeClass
        self.flags = flags
        self._default = default

        # check the default value for compatibility
        if default is not None:
            if not hasattr( default, '__call__' ) and self.compatabilityRate( default ) == 0:
                raise TypeError( "Default value %r is not compatible with this attribute" % default )

    def _getClassRating( self, cls, exact_type ):
        """Compute the class rating

        :return: rating based on cls being a class and comparing it against our typecls.
            0 means there is no type compatibility, 255 if cls matches typecls perfectly, or
            linearly less if cls is merely part of typecls's mro
        """
        if not isinstance( cls, type ):
            return 0

        mro = self.typecls.mro()
        mro.reverse()

        if not cls in mro:
            # if an exact type is not required, a class derived from typecls is just as good
            if not exact_type and self.typecls in cls.mro():
                return self.kPerfect
            return 0

        if len( mro ) == 1:
            return self.kPerfect

        rate = ( float( mro.index( cls ) ) / float( len( mro ) - 1 ) ) * self.kPerfect

        if exact_type and rate != self.kPerfect:
            return 0

        return rate

    def affinity( self, otherattr ):
        """Compute the affinity to otherattr.

        :return:
            rating from 0 to 255 defining how well the attributes match each
            other in general - i.e. how well can we store values of otherattr ?
            Thus this comparison is directed.

        :note: for checking connections, use `connectionAffinity`"""
        # the class flags must match
        if self.flags & self.cls != otherattr.flags & self.cls:
            return 0

        # rate by the default value if the other attribute has one,
        # otherwise fall back to comparing the type classes
        rate = self.kNo
        try:
            defvalue = otherattr.default()
            rate = self.compatabilityRate( defvalue )
        except ( MissingDefaultValueError, TypeError ):
            rate = self._getClassRating( otherattr.typecls, self.flags & self.exact_type )
        # END default value check

        return rate

    def connectionAffinity( self, destinationattr ):
        """Compute the connection affinity for the given destination attribute

        :return:
            rating from 0 to 255 defining the quality of the connection to
            destinationattr. An affinity of 0 means a connection is not possible, 255 means the
            connection is perfectly suited.
            The connection is a directed one from self -> destinationattr """
        if destinationattr.flags & self.unconnectable:
            return 0

        # how well can the destination attribute hold our values ?
        return destinationattr.affinity( self )

    def compatabilityRate( self, value ):
        """Compute the value's compatibility rate

        :return: value between 0 and 255, 0 means no compatibility, 255 a perfect match.
            If larger than 0, the plug can hold the value ( assuming the flags are set correctly ). """
        if isinstance( value, type ):
            # value is a class - we need the 'cls' flag to take it
            if not self.flags & self.cls:
                return 0

            return self._getClassRating( value, self.flags & self.exact_type )
        else:
            # value is an instance - only acceptable if we do not require classes
            if not self.flags & self.cls:
                return self._getClassRating( value.__class__, self.flags & self.exact_type )

        return 0

    def default( self ):
        """:return: default value stored for this attribute, or raise
        :note: handles dynamic defaults, so you should not directly access the default member variable
        :raise MissingDefaultValueError: if the attribute does not have a default value
        :raise TypeError: if the value returned by a dynamic default has an incorrect type"""
        if self._default is None:
            raise MissingDefaultValueError( "Attribute %r has no default value" % self )

        # dynamic defaults - callables are evaluated on demand
        if hasattr( self._default, '__call__' ):
            default = self._default()
            if self.flags & self.check_passing_values and self.compatabilityRate( default ) == 0:
                raise TypeError( "Dynamic default value had incorrect type: %s" % type( default ) )
            return default
        # END dynamic defaults

        return self._default

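    # Example ( sketch ) of how ratings behave::
    #
    #     a = Attribute( int, 0 )
    #     a.compatabilityRate( 3 )        # 255 - a perfect match
    #     a.compatabilityRate( "three" )  # 0   - the plug could not hold a string
    #     a.connectionAffinity( Attribute( int, 0 ) )  # > 0, a connection would be possible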



class iPlug( object ):
    """Defines an interface allowing to compare compatibilities according to types.

    Plugs can either be input plugs or output plugs - output plugs affect no other
    plug on a node, but are affected by 0 or more plugs.

    By convention, a plug has a name - that name must also be the name of the
    member attribute that stores the plug. Plugs, possibly different instances of it,
    need to be re-retrieved on freshly duplicated nodes to allow graph duplication to
    be done properly.

    :note: if your plug class supports the ``setName`` method, a metaclass will
        adjust the name of your plug to match the name it has in the parent class
    """
    kNo, kGood, kPerfect = ( 0, 127, 255 )

    def name( self ):
        """:return: name of the plug ( the name that identifies it on the node )"""
        raise NotImplementedError( "Implement this in subclass" )

    def affects( self, otherplug ):
        """Set an affects relationship between this plug and otherplug, saying
        that this plug affects otherplug."""
        raise NotImplementedError( "Implement this in subclass" )

    def affected( self ):
        """:return: tuple containing affected plugs ( plugs that are affected by our value )"""
        raise NotImplementedError( "Implement this in subclass" )

    def affectedBy( self ):
        """:return: tuple containing plugs that affect us ( plugs affecting our value )"""
        raise NotImplementedError( "Implement this in subclass" )

    def providesOutput( self ):
        """:return: True if this is an output plug that can trigger computations"""
        raise NotImplementedError( "Implement this in subclass" )


class plug( iPlug ):
    """Defines an interface allowing to compare compatibilities according to types.

    Plugs are implemented as descriptors, thus they will be defined on node class
    level, and all static information will remain static.

    As descriptors, they are defined statically on the class, and some additional information,
    such as connectivity, is stored on the respective node instance. These special methods
    are handled using the `NodeBase` class.

    Plugs are implemented as descriptors as all type information can be kept per class,
    whereas only connection information changes per node instance.

    Plugs can either be input plugs or output plugs - output plugs affect no other
    plug on a node, but are affected by 0 or more plugs.

    :note: class is lowercase as it is used as a descriptor ( acting more like a function )
    """
    kNo, kGood, kPerfect = ( 0, 127, 255 )

    def __init__( self, attribute ):
        """Initialize the plug with the given attribute"""
        self._name = None
        self.attr = attribute
        self._affects = list()
        self._affectedBy = list()

    def __get__( self, obj, cls=None ):
        """A value has been requested - return our plugshell that brings together
        both, the object and the static plug"""
        # accessed through an instance - return a shell for it
        if obj is not None:
            return obj.toShell( self )

        # accessed through the class - return the plug itself
        return self

    # There deliberately is no __set__ method: this allows overriding the descriptor through
    # actual plug instances in the instance dict. Once those are deleted, we shine through again.

    def name( self ):
        """:return: name of the plug"""
        return self._name

    def setName( self, name ):
        """Set the name of this plug - it can be set only once"""
        if not self._name:
            self._name = name
        else:
            raise ValueError( "The name of the plug can only be set once" )

    def affects( self, otherplug ):
        """Set an affects relationship between this plug and otherplug, saying
        that this plug affects otherplug."""
        if otherplug not in self._affects:
            self._affects.append( otherplug )

        if self not in otherplug._affectedBy:
            otherplug._affectedBy.append( self )

    def affected( self ):
        """:return: tuple containing affected plugs ( plugs that are affected by our value )"""
        return tuple( self._affects )

    def affectedBy( self ):
        """:return: tuple containing plugs that affect us ( plugs affecting our value )"""
        return tuple( self._affectedBy )

    def providesOutput( self ):
        """:return: True if this is an output plug that can trigger computations"""
        return bool( len( self.affectedBy() ) != 0 or self.attr.flags & Attribute.computable )



class _PlugShell( tuple ):
    """Handles per-node-instance plug connection setup and storage. As plugs are
    descriptors and thus instances of their class, per-node-instance information needs
    special treatment.
    This class is being returned whenever the descriptor's get and set methods are called;
    it contains information about the node and the plug being involved, allowing connection
    info to be tracked directly using the node dict.

    This allows plugs to be connected, and information to flow through the dependency graph.
    Plugs never act alone since they always belong to a parent node that will be asked for
    value computations if the value is not yet cached.

    :note: Do not instantiate this class yourself, it must be created by the node as different
        node types can use different versions of this shell"""

    def __new__( cls, *args ):
        # store ( node, plug ) as our tuple items
        return tuple.__new__( cls, args )

    def __getattr__( self, attr ):
        """Allow easy attribute access while staying memory efficient"""
        if attr == 'node':
            return self[0]
        if attr == 'plug':
            return self[1]

        # let the tuple base class handle it
        return super( _PlugShell, self ).__getattribute__( attr )

    def __repr__( self ):
        return "%s.%s" % ( self.node, self.plug )


    def get( self, mode = None ):
        # ( implementation omitted in this listing )

    def set( self, value, ignore_connection = False ):
        """Set the given value to be used in our plug

        :param ignore_connection: if True, the plug can be the destination of a connection and
            will still get its value set - usually it would be overwritten by the value from the
            connection. The set value will be cleared if something upstream in its connection chain
            changes.
        :raise AssertionError: the respective attribute must be cached, otherwise
            the value will be lost"""
        flags = self.plug.attr.flags
        if flags & Attribute.readonly:
            raise NotWritableError( "Plug %r is not writable" % repr(self) )

        if self.plug.providesOutput( ):
            raise NotWritableError( "Plug %r is not writable as it provides an output itself" % repr(self) )

        if flags & Attribute.uncached:
            raise AssertionError( "Writable attributes must be cached - otherwise the value will not be held" )

        # refuse to overwrite an existing connection unless explicitly asked to
        if not ignore_connection and self.input() is not None:
            raise NotWritableError( "Plug %r is connected to %r and thus not explicitly writable" % ( self, self.input() ) )

        self.setCache( value )

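    # Example ( sketch, using the hypothetical AddNode from above )::
    #
    #     node.inA.set( 5 )      # fine - a writable, cached input plug
    #     node.outSum.set( 5 )   # raises NotWritableError, outSum provides an output
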
    def compatabilityRate( self, value ):
        """Compute the compatibility rate for the given value

        :return: value between 0 and 255, 0 means no compatibility, 255 a perfect match.
            If larger than 0, the plug can hold the value ( assuming the flags are set correctly )
        """
        return self.plug.attr.compatabilityRate( value )


    def connect( self, otherplugshell, **kwargs ):
        """Connect this plug to otherplugshell such that otherplugshell is an input plug for our output

        :param kwargs: everything supported by `Graph.connect`
        :return: self on success, allowing chained connections
        :raise PlugAlreadyConnected: if otherplugshell is connected and force is False
        :raise PlugIncompatible: if otherplugshell does not appear to be compatible to this one"""
        if not isinstance( otherplugshell, _PlugShell ):
            raise AssertionError( "Invalid type given to connect: %r" % repr( otherplugshell ) )

        return self.node.graph.connect( self, otherplugshell, **kwargs )

    def disconnect( self, otherplugshell ):
        """Remove the connection to otherplugshell if we are connected to it.

        :note: does not raise if no connection is present"""
        if not isinstance( otherplugshell, _PlugShell ):
            raise AssertionError( "Invalid type given to disconnect: %r" % repr( otherplugshell ) )

        return self.node.graph.disconnect( self, otherplugshell )


    def outputs( self, predicate = lambda shell: True ):
        """:return: a list of plugs being the destination of the connection
        :param predicate: plug will only be returned if predicate is True for it - shells will be passed in """
        return self.node.graph.outputs( self, predicate = predicate )

    def connections( self, inpt, output, predicate = lambda shell: True ):
        """:return: all input and/or output connections from or to this shell
            as edges ( sourceshell, destinationshell )
        :param predicate: return True for each destination shell that you expect in the
            returned edge, or for the sourceshell where your shell is the destination.
        :note: Use this method to get edges ready for connection/disconnection"""
        outcons = list()
        if inpt:
            sourceshell = self.input( predicate = predicate )
            if sourceshell:
                outcons.append( ( sourceshell, self ) )
        # END input connections

        if output:
            outcons.extend( ( self, oshell ) for oshell in self.outputs( predicate = predicate ) )

        return outcons

    def isConnected( self ):
        """:return: True, if the shell is connected as source or as destination of a connection"""
        return self.input() or self.outputs()

    def iterShells( self, **kwargs ):
        """Iterate plugs and their connections starting at this plug

        :return: generator for plug shells
        :note: supports all options of `iterShells`, this method allows syntax like:
            node.outAttribute.iterShells( )"""
        return iterShells( self, **kwargs )


    def hasCache( self ):
        """:return: True if we currently store a cached value"""
        return hasattr( self.node, self._cachename() )

    def setCache( self, value ):
        """Set the given value to be stored in our cache

        :raise TypeError: if the value is not compatible to our defined type"""
        # attribute compatibility check
        if self.plug.attr.compatabilityRate( value ) == 0:
            raise TypeError( "Plug %r cannot hold value %r as it is not compatible" % ( repr( self ), repr( value ) ) )

        if self.plug.attr.flags & Attribute.uncached:
            return

        # our value changed - dirty everything that depends on us before storing it
        self.clearCache( clear_affected = True )
        setattr( self.node, self._cachename(), value )

    def cache( self ):
        """:return: the cached value or raise

        :raise ValueError: if no cached value is present"""
        if self.hasCache():
            return getattr( self.node, self._cachename() )

        raise ValueError( "Plug %r did not have a cached value" % repr( self ) )

    def clearCache( self, clear_affected = False, cleared_shells_set = None ):
        """Empty the cache of our plug

        :param clear_affected: if True, the caches of our affected plugs ( connections
            or affects relations ) will also be cleared.
            This operation is recursive, and needs to be as different shells on different nodes
            might do things differently.
        :param cleared_shells_set: if set, it can be used to track which plugs have already been dirtied to
            prevent recursive loops.
            Propagation will happen even if we do not have a cache to clear ourselves """
        if self.hasCache():
            del( self.node.__dict__[ self._cachename() ] )

        if clear_affected:
            # keep track of cleared shells to prevent recursive loops
            if not cleared_shells_set:
                cleared_shells_set = set()

            if self in cleared_shells_set:
                return

            cleared_shells_set.add( self )

            all_shells = itertools.chain( self.node.toShells( self.plug.affected() ), self.outputs() )
            for shell in all_shells:
                shell.clearCache( clear_affected = True, cleared_shells_set = cleared_shells_set )
            # END for each affected shell
        # END clear_affected


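    # Example ( sketch ): dirtying an input invalidates everything downstream, so the
    # next get() on an affected output recomputes::
    #
    #     node.inA.clearCache( clear_affected = True )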

    # syntactic sugar: source >> destination creates a forced connection
    __rshift__ = lambda self, other: self.connect( other, force=True )

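    # Example ( sketch )::
    #
    #     nodeA.outSum >> nodeB.inA   # same as nodeA.outSum.connect( nodeB.inA, force = True )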



class Graph( nx.DiGraph, iDuplicatable ):
    """Holds the nodes and their connections

    Nodes are kept in a separate list whereas the plug connections are kept
    in the underlying DiGraph"""

    def __init__( self, **kwargs ):
        """Initialize the DiGraph and add some additional attributes"""
        super( Graph, self ).__init__( **kwargs )
        self._nodes = set()

    def __del__( self ):
        """Clear our graph"""
        self.clear()

        # nodes will remove themselves once they are no longer referenced
        self._nodes.clear()

    def __getattr__( self, attr ):
        """Allow access to nodes by name just by accessing the graph directly"""
        try:
            return self.nodeByID( attr )
        except NameError:
            return super( Graph, self ).__getattribute__( attr )

    def writeDot( self, fileOrPath ):
        """Write the connections in self to the given file object or path

        :todo: remove if no longer needed"""
        # build a separate graph so plug shells show up attached to their nodes
        writegraph = nx.DiGraph()

        # add all nodes first so unconnected ones show up as well
        for node in self.iterNodes():
            writegraph.add_node( node, color="#ebba66", width=4, height=2, fontsize=22 )
        # END for each node

        # add the plug connections and tie each shell to its node
        for sshell, eshell in self.edges_iter():
            writegraph.add_edge( sshell, eshell )

            writegraph.add_edge( sshell.node, sshell, color="#000000" )

            writegraph.add_node( sshell, color="#000000", label=sshell.plug )
            writegraph.add_node( eshell, color="#000000", label=eshell.plug )

            writegraph.add_edge( eshell, eshell.node, color="#000000" )
        # END for each edge

        nx.write_dot( writegraph, fileOrPath )


794 """Create a copy of self and return it"""
795 return self.__class__( )
796
798 """Duplicate all data from other graph into this one, create a duplicate
799 of the nodes as well"""
800 def copyshell( shell, nodemap ):
801 nodecpy = nodemap[ shell.node ]
802
803
804
805 return getattr( nodecpy, shell.plug.name() )
806
807
808 self.name = other.name
809
810
811 nodemap = dict()
812 for node in other.iterNodes():
813 nodecpy = node.duplicate( add_to_graph = False )
814 nodemap[ node ] = nodecpy
815
816
817
818 for duplnode in nodemap.itervalues():
819 self.addNode( duplnode )
820
821
822 for sshell,eshell in other.edges_iter():
823
824
825
826 cstart = copyshell( sshell, nodemap )
827 cend = copyshell( eshell, nodemap )
828
829 cstart.connect( cend )
830
831
832
833
834
835
836
838 """Add a new node instance to the graph
839 :note: node membership is exclusive, thus node instances
840 can only be in one graph at a time
841 :return: self, for chained calls"""
842 if not isinstance( node, NodeBase ):
843 raise TypeError( "Node %r must be of type NodeBase" % node )
844
845
846 if node in self._nodes:
847 return self
848
849
850 if node.graph is not None:
851 node.graph.removeNode( node )
852
853
854 self._nodes.add( node )
855 node.graph = weakref.proxy( self )
856
857 return self
858
860 """Remove the given node from the graph ( if it exists in it )"""
861 try:
862
863 for sshell, eshell in node.connections( 1, 1 ):
864 self.disconnect( sshell, eshell )
865
866
867 node.graph = None
868 self._nodes.remove( node )
869 except KeyError:
870 pass
871
873 """Clear the cache of all nodes in the graph - this forces the graph
874 to reevaluate on the next request"""
875 for node in self._nodes:
876 node.clearCache()
877
878
879
880
881
883 """:return: True if the node is in this graph, false otherwise"""
884 return node in self._nodes
885
886 - def iterNodes( self, predicate = lambda node: True ):
887 """:return: generator returning all nodes in this graph
888 :param predicate: if True for node, it will be returned
889 :note: there is no particular order"""
890 for node in self._nodes:
891 if predicate( node ):
892 yield node
893
894
896 """:return: generator returning all nodes that are connected in this graph,
897 in no particular order.
898 For an ordered itereration, use `iterShells`.
899
900 :param predicate: if True for node, it will be returned"""
901
902 nodes_seen = set()
903 for node,plug in self.nodes_iter():
904 if node in nodes_seen:
905 continue
906 nodes_seen.add( node )
907 if predicate( node ):
908 yield node
909
910
912 """:return: immutable copy of the nodes used in the graph"""
913 return tuple( self._nodes )
914
916 """:return: number of nodes in the graph"""
917 return len( self._nodes )
918
920 """:return: instance of a node according to the given node id
921 :raise NameError: if no such node exists in graph"""
922 for node in self.iterNodes():
923 if node.id() == nodeID:
924 return node
925
926 raise NameError( "Node with ID %s not found in graph" % nodeID )
927
928
929
930
931
    def connect( self, sourceshell, destinationshell, force = False ):
        """Connect sourceshell to destinationshell such that destinationshell is an input for sourceshell's output

        :param sourceshell: PlugShell being the source of the connection
        :param destinationshell: PlugShell being the destination of the connection
        :param force: if False, existing connections to destinationshell will not be broken, but an exception is raised.
            If True, existing connections may be broken
        :return: sourceshell on success, allowing chained connections
        :raise PlugAlreadyConnected: if destinationshell is connected and force is False
        :raise PlugIncompatible: if destinationshell does not appear to be compatible to the source"""
        # both nodes must live in the same graph
        if not sourceshell.node.graph is destinationshell.node.graph:
            raise AssertionError( "You cannot connect nodes from different graphs" )

        self._nodes.add( sourceshell.node )
        self._nodes.add( destinationshell.node )

        # check compatibility
        if sourceshell.plug.attr.connectionAffinity( destinationshell.plug.attr ) == 0:
            raise PlugIncompatible( "Cannot connect %r to %r as they are incompatible" % ( repr( sourceshell ), repr( destinationshell ) ) )

        oinput = destinationshell.input( )
        if oinput is not None:
            if oinput == sourceshell:
                return sourceshell

            if not force:
                raise PlugAlreadyConnected( "Cannot connect %r to %r as it is already connected" % ( repr( sourceshell ), repr( destinationshell ) ) )

            # break the existing connection
            oinput.disconnect( destinationshell )
        # END destination already connected

        # connect the shells
        self.add_edge( sourceshell, v = destinationshell )
        return sourceshell

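    # Example ( sketch ): connections can also be made through the graph directly::
    #
    #     graph.connect( nodeA.outSum, nodeB.inA )                # raises PlugAlreadyConnected if inA has an input
    #     graph.connect( nodeA.outSum, nodeB.inA, force = True )  # breaks an existing connection first
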
    def disconnect( self, sourceshell, destinationshell ):
        """Remove the connection between sourceshell and destinationshell if they are connected

        :note: does not raise if no connection is present"""
        self.remove_edge( sourceshell, v = destinationshell )

        # also drop the shell nodes if they are no longer connected to anything
        for shell in sourceshell, destinationshell:
            if len( self.neighbors( shell ) ) == 0:
                self.remove_node( shell )
        # END for each shell

    def outputs( self, plugshell, predicate = lambda x: True ):
        """:return: a list of plugs being the destination of the connection to plugshell
        :param predicate: plug will only be returned if predicate is True for it - shells will be passed in """
        try:
            return [ s for s in self.successors( plugshell ) if predicate( s ) ]
        except nx.NetworkXError:
            return list()



class NodeBase( iDuplicatable ):
    """Base class that provides plug support to its subclasses.
    It will create some simple tracking attributes required for the plug system
    to work.

    Nodes can compute values of their plugs if these do not have a cache.

    Nodes are identified by an ID - the default graph implementation though will
    be okay with just having instances.
    It is also being used for string representations of this node"""
    shellcls = _PlugShell
    __metaclass__ = _NodeBaseCheckMeta


    def __init__( self, *args, **kwargs ):
        """We require a directed graph to track the connectivity between the plugs.
        It must be supplied by the super class and should be as global as required to
        connect the NodeBases together properly.

        :param kwargs: 'id' = id of the instance, defaults to None if it is not required
        :note: we are super() compatible, and assure our base is initialized correctly"""
        self.graph = None
        self._id = None

        # set id if given
        newid = kwargs.get( 'id', None )
        if newid:
            self.setID( newid )

    def __del__( self ):
        """Remove ourselves from the graph and delete our connections"""
        try:
            pass
        except ( AttributeError, ReferenceError ):
            pass

    def __str__( self ):
        """Use our id as string or the default implementation"""
        if self.id() is not None:
            return str( self.id() )

        return super( NodeBase, self ).__str__( )


    def createInstance( self, *args, **kwargs ):
        """Create a copy of self and return it

        :note: override by subclass - the __init__ methods should do the rest"""
        return self.__class__( id = self.id() )

    def copyFrom( self, other, add_to_graph = True ):
        """Just take the graph from other, but do not ( ever ) duplicate it

        :param add_to_graph: if True, the new node instance will be added to the graph of other
        :note: the default implementation does not copy plug caches ( which are stored in
            the node dict ) - this is because a reevaluation is usually required on the
            duplicated node"""
        self.setID( other.id() )
        if add_to_graph and other.graph:
            other.graph.addNode( self )


    def compute( self, plug, mode ):
        """Called whenever a plug needs computation as its value is not
        cached or is marked dirty ( as one of the inputs changed )

        :param plug: the static plug instance which requested the computation.
            It is the instance you defined on the class
        :param mode: the mode of operation. It is completely up to the subclasses how that
            attribute is going to be used
        :note: to be implemented by subclass """
        raise NotImplementedError( "To be implemented by subclass" )

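    # Example ( sketch ) of a typical compute override on a hypothetical node type::
    #
    #     def compute( self, plug, mode ):
    #         if plug == MyNode.outResult:
    #             return self.inValue.get( ) * 2
    #         raise PlugUnhandled( "Plug %s is not handled" % plug )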


    def setID( self, newID ):
        """Set the id of this node to newID

        :return: previously assigned id"""
        curid = self.id()
        self._id = newID
        return curid

    def id( self ):
        """:return: ID of this instance"""
        return self._id


    def toShells( self, plugs ):
        """:return: list of shells made from plugs and our node"""
        outlist = list()
        for plug in plugs:
            outlist.append( self.toShell( plug ) )
        return outlist

    def toShell( self, plug ):
        """:return: a plugshell suitable for this class"""
        return getattr( self, 'shellcls' )( self, plug )

    def clearCache( self ):
        """Clear the cache of all plugs on this node - this basically forces it
        to recompute the next time an output plug is being queried"""
        for plug in self.plugs( ):
            self.toShell( plug ).clearCache( clear_affected = False )

    @classmethod
    def plugsStatic( cls, predicate = lambda x: True ):
        """:return: list of static plugs as defined on this node - they are class members
        :param predicate: return static plug only if predicate is True
        :note: Use this method only if you do not have an instance - there are nodes
            that actually have no static plug information, but will dynamically generate them.
            For this to work, they need an instance - thus the plugs method is an instance
            method and is meant to be the most commonly used one."""
        pred = lambda m: isinstance( m, plug )

        # filter the class members down to plug instances matching the predicate
        pluggen = ( m[1] for m in inspect.getmembers( cls, predicate = pred ) if predicate( m[1] ) )
        return list( pluggen )

    def plugs( self, predicate = lambda x: True ):
        """:return: list of dynamic plugs as defined on this node - they are usually retrieved
            on class level, but may be overridden on instance level
        :param predicate: return a plug only if predicate is True"""
        # check the instance dict first as it may override class level plugs
        all_dict_holders = itertools.chain( ( self, ), self.__class__.mro() )
        all_dicts = ( instance.__dict__ for instance in all_dict_holders )
        pluggen = ( v for d in all_dicts for v in d.itervalues() if isinstance( v, plug ) and predicate( v ) )

        return list( pluggen )


    @classmethod
    def outputPlugsStatic( cls, **kwargs ):
        """:return: list of static plugs suitable to deliver output
        :note: convenience method"""
        return cls.plugsStatic( predicate = lambda p: p.providesOutput(), **kwargs )

    def outputPlugs( self, **kwargs ):
        """:return: list of plugs suitable to deliver output
        :note: convenience method"""
        return self.plugs( predicate = lambda p: p.providesOutput(), **kwargs )

    def connections( self, inpt, output ):
        """:return: Tuples of shells defining a connection of the given type:
            tuple( InputNodeOutputShell, OurNodeInputShell ) for input connections and
            tuple( OurNodeOutputShell, OutputNodeInputShell ) for output connections
        :param inpt: include input connections to this node
        :param output: include output connections ( from this node to others )"""
        outConnections = list()
        plugs = self.plugs()

        # input connections
        if inpt:
            shells = self.toShells( ( p for p in plugs if p.providesInput() ) )
            for shell in shells:
                ishell = shell.input( )
                if ishell:
                    outConnections.append( ( ishell, shell ) )
            # END for each input shell
        # END input connections

        # output connections
        if output:
            shells = self.toShells( ( p for p in plugs if p.providesOutput() ) )
            for shell in shells:
                outConnections.extend( ( ( shell, oshell ) for oshell in shell.outputs() ) )
        # END output connections

        return outConnections

    @classmethod
    def filterCompatiblePlugs( cls, plugs, attrOrValue, raise_on_ambiguity = False, attr_affinity = False,
                               attr_as_source = True ):
        """:return: sorted list of ( rate, plug ) tuples suitable to deal with the given attribute.
            Thus they could connect to it as well as get their value set.
            The most suitable plug comes first.
            Incompatible plugs will be pruned.
        :param attrOrValue: either an attribute or the value you would like to set to the
            attr at the plug in question.
        :param raise_on_ambiguity: if True, the method raises if a plug has the same
            rating as another plug already on the output list, thus it is not clear anymore
            which plug should handle a request
        :param attr_affinity: if True, it will not check connection affinity, but attribute
            affinity only. It checks how compatible the attributes of the plugs are, disregarding
            whether they can be connected or not.
            Only valid if attrOrValue is an attribute
        :param attr_as_source: if True, attrOrValue will be treated as the source of a connection
            from which each plug would need to take its value.
            If False, attrOrValue is the destination of a connection and it needs to take the values
            of the given plugs, or they would connect to it. Only used if attrOrValue is an attribute.
        :raise TypeError: if ambiguous input was found"""
        attribute = None
        value = attrOrValue
        if isinstance( attrOrValue, Attribute ):
            attribute = attrOrValue

        outSorted = list()
        for plug in plugs:
            if attribute:
                sourceattr = attribute
                destinationattr = plug.attr
                if not attr_as_source:
                    destinationattr = attribute
                    sourceattr = plug.attr

                if attr_affinity:
                    rate = destinationattr.affinity( sourceattr )
                else:
                    rate = sourceattr.connectionAffinity( destinationattr )
                # END which affinity type
            else:
                rate = plug.attr.compatabilityRate( value )
            # END attribute or value

            if not rate:
                continue

            outSorted.append( ( rate, plug ) )
        # END for each plug

        outSorted.sort()
        outSorted.reverse()

        if raise_on_ambiguity:
            ratemap = dict()
            for rate, plug in outSorted:
                ratemap.setdefault( rate, list() ).append( plug )
            # END for each compatible plug

            report = ""
            for rate, pluglist in ratemap.iteritems( ):
                if len( pluglist ) > 1:
                    report += "Rate: %i :" % rate
                    for plug in pluglist:
                        report += "\n%s" % str( plug )
            # END for each rate in ratemap

            if report:
                report = "Ambiguous plugs found\n" + report
                raise TypeError( report )
        # END ambiguity check

        return outSorted

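    # Example ( sketch ): pick the best plug on a node to receive an integer value::
    #
    #     rated = node.filterCompatiblePlugs( node.plugs( ), 5 )
    #     if rated:
    #         best_rate, best_plug = rated[ 0 ]
    #         node.toShell( best_plug ).set( 5 )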