Package mrv :: Module dge

Source Code for Module mrv.dge

   1  # -*- coding: utf-8 -*- 
   2  """Contains a simple but yet powerful dependency graph engine allowing computations 
   3  to be organized more efficiently. 
   4  """ 
   5  __docformat__ = "restructuredtext" 
   6   
   7  import networkx as nx 
   8  from collections import deque 
   9  import inspect 
  10  import weakref 
  11  import itertools 
  12  from util import iDuplicatable 
  13   
  14  __all__ = ("ConnectionError", "PlugIncompatible", "PlugAlreadyConnected", "AccessError", 
  15             "NotWritableError", "NotReadableError", "MissingDefaultValueError", "ComputeError",  
  16             "ComputeFailed", "ComputeFailed", "PlugUnhandled",  
  17             "iterShells", "Attribute", "iPlug", "plug", "Graph", "NodeBase") 
18 19 ##################### 20 ## EXCEPTIONS ###### 21 ################### 22 #{ Exceptions 23 24 -class ConnectionError( Exception ):
25 """Exception base for all plug related errors"""
26
27 -class PlugIncompatible( ConnectionError, TypeError ):
28 """Thrown whenever plugs are not compatible with each other during connection"""
29
30 -class PlugAlreadyConnected( ConnectionError ):
31 """Thrown if one tries to connect a plug to otherplug when otherplug is already connected"""
32
33 -class AccessError( Exception ):
34 """Base class for all errors indicating invalid access"""
35
36 -class NotWritableError( AccessError ):
37 """Thrown if a non-writable plug is being written to"""
38
39 -class NotReadableError( AccessError ):
40 """Thrown if a non-readable attribute is being read"""
41
42 -class MissingDefaultValueError( AccessError ):
43 """Thrown if a default value is missing for input attributes that are not connected"""
44
45 -class ComputeError( Exception ):
46 """Thrown if the computation done by a plug failed by an unknown exception 47 It will be passed on in the exception"""
48
49 -class ComputeFailed( ComputeError ):
50 """Raised by the derived class computing a value if the computational goal 51 cannot be achieved ( anymore )"""
52
53 -class PlugUnhandled( ComputeError ):
54 """Raised if a plug was not handled by the node's compute method"""
55
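The exception hierarchy above lets callers react at different levels of detail. A minimal, hypothetical sketch (the shell variables are invented for illustration and are not part of this module):

    try:
        some_shell.connect( other_shell )       # hypothetical plug shells
    except PlugAlreadyConnected:
        # the destination already has an input - retry with force=True or disconnect first
        pass
    except ConnectionError:
        # any other plug related failure, e.g. PlugIncompatible
        raise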
56 #} END exceptions 57 58 59 60 ##################### 61 ## Iterators ###### 62 ################### 63 #{ Iterators 64 -def iterShells( rootPlugShell, stopAt = lambda x: False, prune = lambda x: False, 65 direction = "up", visit_once = False, branch_first = False ):
66 """Iterator starting at rootPlugShell going "up"stream ( input ) or "down"stream ( output ) 67 breadth first over plugs, applying filter functions as defined. 68 69 :param rootPlugShell: shell at which to start the traversal. The root plug will be returned as well 70 :param stopAt: if function returns true for given PlugShell, iteration will not proceed 71 at that point ( possibly continuing at other spots ). Function will always be called, even 72 if the shell would be pruned as well. The shell serving as stop marker will not be returned 73 :param prune: if function returns true for given PlugShell, the shell will not be returned 74 but iteration continues. 75 :param direction: traversal direction 76 "up" upstream, in direction of inputs of plugs 77 "down" downstream, in direction of outputs of plugs 78 :param visit_once: if True, plugs will only be returned once, even though they are 79 :param branch_first: if True, individual branches will be travelled first ( thuse the node will be left quickly following the datastream ). 80 If False, the plugs on the ndoe will be returned first before proceeding to the next node 81 encountered several times as several noodes are connected to them in some way. """ 82 visited = set() 83 stack = deque() 84 stack.append( rootPlugShell ) 85 86 def addToStack( node, stack, lst, branch_first ): 87 if branch_first: 88 stack.extend( node.toShell( plug ) for plug in lst ) 89 else: 90 reviter = ( node.toShell( lst[i] ) for i in range( len( lst )-1,-1,-1) ) 91 stack.extendleft( reviter )
92 # END addToStack local method 93 94 def addOutputToStack( stack, lst, branch_first ): 95 if branch_first: 96 stack.extend( lst ) 97 else: 98 stack.extendleft( reversed( lst[:] ) ) 99 # END addOutputToStack local method 100 101 while stack: 102 shell = stack.pop() 103 if shell in visited: 104 continue 105 106 if visit_once: 107 visited.add( shell ) 108 109 if stopAt( shell ): 110 continue 111 112 if not prune( shell ): 113 yield shell 114 115 if direction == 'up': 116 # I-N-O 117 addToStack( shell.node, stack, shell.plug.affectedBy(), branch_first ) 118 # END if provides output 119 120 # O<-I 121 ishell = shell.input( ) 122 if ishell: 123 if branch_first: 124 stack.append( ishell ) 125 else: 126 stack.appendleft( ishell ) 127 # END has input connection 128 # END upstream 129 else: 130 # I-N-O and I->O 131 # could also be connected - follow them 132 if branch_first: 133 # fist the outputs, then the internals ( this ends up with the same effect ) 134 addToStack( shell.node, stack, shell.plug.affected(), branch_first ) 135 addOutputToStack( stack, shell.outputs(), branch_first ) 136 else: 137 addOutputToStack( stack, shell.outputs(), branch_first ) 138 addToStack( shell.node, stack, shell.plug.affected(), branch_first ) 139 # END downstream
140 # END for each shell on work stack 141 142 #} END iterators 143 144 145 ##################### 146 ## Classes ###### 147 ################### 148 149 150 #{ END Plugs and Attributes 151 152 -class Attribute( object ):
153 """Simple class defining the type of a plug and several flags that 154 affect it. Additionally it can determine how well suited another attribute is 155 156 **Flags**: 157 exact_type: if True, derived classes of our typecls are not considered to be a valid type. 158 i.e: basestring could be stored in a str attr if exact type is false - its less than we need, but 159 still something. 160 Putting a str into a basestring attribute will always work though, as it would be more than we need 161 readonly: if True, the attribute's plug cannot be written to. Read-only attributes can be used 162 as storage that the user can read, but not write. 163 You can write read-only plugs by directly setting its cache - this of course - is only 164 for the node itself, but will never be done by the framework 165 166 **computable**: 167 Nodes are automatically computable if they are affected by another plug. 168 If this is not the case, they are marked input only and are not computed. 169 If this flag is true, even unaffeted plugs are computable. 170 Plugs that affect something are automatically input plugs and will not be computed. 171 If the plug does not affect anything and this flag is False, they are seen as input plugs 172 anyway. 173 174 The system does not allow plugs to be input and output plugs at the same time, thus your compute 175 cannot be triggered by your own compute 176 177 cls: if True, the plug requires classes to be set ( instances of 'type' ) , but no instances of these classes 178 uncached: if False, computed values may be cached, otherwise they will always be recomputed. 179 unconnectable: if True, the node cannot be the destination of a connection 180 check_passing_values: check each value as it flows through a connection - usually compatability is only checked 181 on connection and once values are set, but not if they flow through an existing connection 182 183 **Default Values**: 184 Although default values can be simple primitives are classes, a callable is specifically supported. 185 It allows you to get a callback whenever a default value is required. 186 The same result could be achieved by connected the plug in question, but dynamic defaults are a quick 187 way to achive that. 188 Your returned value will be type-checked against the required type if check_passing_values is set.""" 189 kNo, kGood, kPerfect = 0, 127, 255 # specify how good attributes fit together 190 exact_type, readonly, computable, cls, uncached, unconnectable,check_passing_values = ( 1, 2, 4, 8, 16, 32, 64 ) 191
192 - def __init__( self, typeClass, flags, default = None ):
193 self.typecls = typeClass 194 self.flags = flags # used for bitflags describing mode 195 self._default = default 196 197 # check default value for compatability ! 198 if default is not None: 199 if not hasattr( default, '__call__' ) and self.compatabilityRate( default ) == 0: 200 raise TypeError( "Default value %r is not compatible with this attribute" % default )
201 # END default type check 202
203 - def _getClassRating( self, cls, exact_type ):
204 """ compute class rating 205 206 :return: rating based on value being a class and compare. 207 0 means there is no type compatability, 255 matches comparecls, or linearly 208 less if is just part of the mro of value 209 """ 210 if not isinstance( cls, type ): 211 return 0 212 213 mro = self.typecls.mro() 214 mro.reverse() 215 216 if not cls in mro: 217 # if we are in the classes mr, then we can perfectly store the class 218 # as it is more than we need 219 if not exact_type and self.typecls in cls.mro(): 220 return self.kPerfect 221 return 0 222 # END simple mro checking 223 224 if len( mro ) == 1: 225 return self.kPerfect 226 227 rate = ( float( mro.index( cls ) ) / float( len( mro ) - 1 ) ) * self.kPerfect 228 229 if exact_type and rate != self.kPerfect: # exact type check 230 return 0 231 232 return rate
233 234 #{ Interface 235
236 - def affinity( self, otherattr ):
237 """Compute affinity for otherattr. 238 239 :return: 240 rating from 0 to 255 defining how good the attribtues match each 241 other in general - how good can we store values of otherattr ? 242 Thus this comparison is directed. 243 244 :note: for checking connections, use `connectionAffinity`""" 245 # see whether our class flags match 246 if self.flags & self.cls != otherattr.flags & self.cls: 247 return 0 248 249 # DEFAULT VALUE CHECK 250 ####################### 251 # see whether we destination can handle our default value - if not 252 # just go for a class comparison 253 rate = self.kNo 254 try: 255 defvalue = otherattr.default() 256 rate = self.compatabilityRate( defvalue ) 257 except (MissingDefaultValueError,TypeError): 258 rate = self._getClassRating( otherattr.typecls, self.flags & self.exact_type ) 259 # finally check how good our types match 260 261 return rate
262
263 - def connectionAffinity( self, destinationattr ):
264 """Compute connection affinity for given destination attribute 265 266 :return: 267 rating from 0 to 255 defining the quality of the connection to 268 otherplug. an affinity of 0 mean connection is not possible, 255 mean the connection 269 is perfectly suited. 270 The connection is a directed one from self -> otherplug """ 271 if destinationattr.flags & self.unconnectable: # destination must be connectable 272 return 0 273 274 # how good can the destination attr deal with us ? 275 return destinationattr.affinity( self )
276
277 - def compatabilityRate( self, value ):
278 """Compute value's compatability rate 279 280 :return: value between 0 and 255, 0 means no compatability, 255 a perfect match. 281 if larger than 0, the plug can hold the value ( assumed the flags are set correctly ). """ 282 if isinstance( value, type ): 283 # do we need a class ? 284 if not self.flags & self.cls: 285 return 0 # its a class 286 287 # check compatability 288 return self._getClassRating( value, self.flags & self.exact_type ) 289 # END is class type 290 else: 291 if not self.flags & self.cls: 292 return self._getClassRating( value.__class__, self.flags & self.exact_type ) 293 # END is instance type 294 295 return 0
296
297 - def default( self ):
298 """:return: default value stored for this attribute, or raise 299 :note: handles dynamic defaults, so you should not directly access the default member variable 300 :raise MissingDefaultValueError: if attribute does not have a default value 301 :raise TypeError: if value returned by dynamic attribute has incorrect type""" 302 if self._default is None: 303 raise MissingDefaultValueError( "Attribute %r has no default value" % self ) 304 305 # DYNAMIC ATTRIBUTES 306 ###################### 307 if hasattr( self._default, '__call__' ): 308 default = self._default() 309 if self.flags & self.check_passing_values and self.compatabilityRate( default ) == 0: 310 raise TypeError( "Dynamic default value had incorrect type: %s" % type( default ) ) 311 return default 312 # END dynamic default handling 313 314 # normal static default 315 return self._default
316
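A brief usage sketch for Attribute (all variable names below are invented for illustration; the behaviour follows the methods defined above):

    # attribute storing float instances, with a static default of 1.0
    floatAttr = Attribute( float, 0, default = 1.0 )
    assert floatAttr.compatabilityRate( 2.5 ) == Attribute.kPerfect    # instance fits
    assert floatAttr.compatabilityRate( "text" ) == Attribute.kNo      # wrong type
    assert floatAttr.default() == 1.0                                  # static default

    # attribute that requires classes ( not instances ) of exactly this type
    clsAttr = Attribute( int, Attribute.cls | Attribute.exact_type )

    # dynamic default: the callable is invoked each time the default is requested
    dynAttr = Attribute( float, Attribute.check_passing_values, default = lambda: 0.0 )
    assert dynAttr.default() == 0.0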
317 318 #} END interface 319 320 321 322 -class iPlug( object ):
323 """Defines an interface allowing to compare compatabilies according to types. 324 325 Plugs can either be input plugs or output plugs - output plugs affect no other 326 plug on a node, but are affected by 0 or more plugs . 327 328 By convention, a plug has a name - that name must also be the name of the 329 member attribute that stores the plag. Plugs, possibly different instances of it, 330 need to be re-retrieved on freshly duplicated nodes to allow graph duplication to 331 be done properly 332 333 :note: if your plug class supports the ``setName`` method, a metaclass will 334 adjust the name of your plug to match the name it has in the parent class 335 """ 336 kNo,kGood,kPerfect = ( 0, 127, 255 ) 337 338 #{ Base Implementation
339 - def __str__( self ):
340 return self.name()
341 342 #} END base implementation 343 344 345 #{ Interface
346 - def name( self ):
347 """:return: name of the plug ( the name that identifies it on the node""" 348 raise NotImplementedError( "Implement this in subclass" )
349
350 - def affects( self, otherplug ):
351 """Set an affects relation ship between this plug and otherplug, saying 352 that this plug affects otherplug.""" 353 raise NotImplementedError( "Implement this in subclass" )
354
355 - def affected( self ):
356 """:return: tuple containing affected plugs ( plugs that are affected by our value )""" 357 raise NotImplementedError( "Implement this in subclass" )
358
359 - def affectedBy( self ):
360 """:return: tuple containing plugs that affect us ( plugs affecting our value )""" 361 raise NotImplementedError( "Implement this in subclass" )
362
363 - def providesOutput( self ):
364 """:return: True if this is an output plug that can trigger computations""" 365 raise NotImplementedError( "Implement this in subclass" )
366
367 - def providesInput( self ):
368 """:return: True if this is an input plug that will never cause computations""" 369 raise NotImplementedError( "Implement this in subclass" )
370
371 #} END interface 372 373 374 -class plug( iPlug ):
375 """Defines an interface allowing to compare compatabilies according to types. 376 377 Plugs are implemented as descriptors, thus they will be defined on node class 378 level, and all static information will remain static 379 380 As descriptors, they are defined statically on the class, and some additional information 381 such as connectivity, is stored on the respective class instance. These special methods 382 are handled using `NodeBase` class 383 384 Plugs are implemented as descriptors as all type information can be kept per class, 385 whereas only connection information changes per node instance. 386 387 Plugs can either be input plugs or output plugs - output plugs affect no other 388 plug on a node, but are affected by 0 or more plugs 389 390 :note: class is lowercase as it is used as descriptor ( acting more like a function ) 391 """ 392 kNo,kGood,kPerfect = ( 0, 127, 255 ) 393 394 #{ Overridden object methods
395 - def __init__( self, attribute ):
396 """Intialize the plug with a distinctive name""" 397 self._name = None 398 self.attr = attribute 399 self._affects = list() # list of plugs that are affected by us 400 self._affectedBy = list() # keeps record of all plugs that affect us
401 402 #} END object overridden methods 403 404 #{ Value access 405
406 - def __get__( self, obj, cls=None ):
407 """A value has been requested - return our plugshell that brings together 408 both, the object and the static plug""" 409 # in class mode we return ourselves for access 410 if obj is not None: 411 return obj.toShell( self ) 412 413 # class attributes just return the descriptor itself for direct access 414 return self 415 416 417 #def __set__( self, obj, value ): 418 """We do not use a set method, allowing to override our descriptor through 419 actual plug instances in the instance dict. Once deleted, we shine through again"""
420 # raise AssertionError( "To set this value, use the node.plug.set( value ) syntax" ) 421 # obj.toShell( self ).set( value ) 422 423 #} value access 424 425
426 - def name( self ):
427 """:return: name of plug""" 428 return self._name
429
430 - def setName( self, name ):
431 """Set the name of this plug - can be set only once""" 432 if not self._name: 433 self._name = name 434 else: 435 raise ValueError( "The name of the plug can only be set once" )
436
437 - def affects( self, otherplug ):
438 """Set an affects relation ship between this plug and otherplug, saying 439 that this plug affects otherplug.""" 440 if otherplug not in self._affects: 441 self._affects.append( otherplug ) 442 443 if self not in otherplug._affectedBy: 444 otherplug._affectedBy.append( self )
445
446 - def affected( self ):
447 """:return: tuple containing affected plugs ( plugs that are affected by our value )""" 448 return tuple( self._affects )
449
450 - def affectedBy( self ):
451 """:return: tuple containing plugs that affect us ( plugs affecting our value )""" 452 return tuple( self._affectedBy )
453
454 - def providesOutput( self ):
455 """:return: True if this is an output plug that can trigger computations""" 456 return bool( len( self.affectedBy() ) != 0 or self.attr.flags & Attribute.computable )
457
458 - def providesInput( self ):
459 """:return: True if this is an input plug that will never cause computations""" 460 #return len( self._affects ) != 0 and not self.providesOutput( ) 461 return not self.providesOutput() # previous version did not recognize storage plugs as input
462
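A short sketch of how plug descriptors are typically declared and wired (names invented; in practice these assignments live in the body of a NodeBase subclass, defined further below, so the metaclass can assign the plug names):

    inValue  = plug( Attribute( float, 0 ) )
    outValue = plug( Attribute( float, 0 ) )

    inValue.affects( outValue )                  # outValue is computed from inValue

    assert inValue.providesInput()               # affects something, affected by nothing
    assert outValue.providesOutput()             # affected by inValue, hence computable
    assert outValue.affectedBy() == ( inValue, )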
463 464 #} END plugs and attributes 465 466 467 -class _PlugShell( tuple ):
468 """Handles per-node-instance plug connection setup and storage. As plugs are 469 descriptors and thus an instance of the class, per-node-instance information needs 470 special treatment. 471 This class is being returned whenever the descriptors get and set methods are called, 472 it contains information about the node and the plug being involved, allowing to track 473 connection info directly using the node dict 474 475 This allows plugs to be connected, and information to flow through the dependency graph. 476 Plugs never act alone since they always belong to a parent node that will be asked for 477 value computations if the value is not yet cached. 478 :note: Do not instantiate this class youself, it must be created by the node as different 479 node types can use different versions of this shell""" 480 481 #{ Object Overrides 482
483 - def __new__( cls, *args ):
484 return tuple.__new__( cls, args )
485
486 - def __getattr__( self, attr ):
487 """Allow easy attribute access while staying memory efficient""" 488 if attr == 'node': 489 return self[0] 490 if attr == 'plug': 491 return self[1] 492 493 # let it raise the typical error 494 return super( _PlugShell, self ).__getattribute__( attr )
495
496 - def __repr__ ( self ):
497 return "%s.%s" % ( self.node, self.plug )
498
499 - def __str__( self ):
500 return repr( self )
501 502 #} END object overrides 503 504 505 #{ Values 506
507 - def get( self, mode = None ):
508 """:return: value of the plug 509 :param mode: optional arbitary value specifying the mode of the get attempt""" 510 if self.hasCache( ): 511 return self.cache( ) 512 513 # Output plugs compute values 514 if self.plug.providesOutput( ): 515 # otherwise compute the value 516 try: 517 result = self.node.compute( self.plug, mode ) 518 except ComputeError,e: 519 raise ComputeError( "%s->%s" % ( repr( self ), str( e ) ) ) 520 except Exception: # except all - this is an unknown excetion - just pass it on, keeping the origin 521 raise 522 523 if result is None: 524 raise AssertionError( "Plug %s returned None - check your node implementation" % ( str( self ) ) ) 525 # END result check 526 527 # try to cache computed values 528 self.setCache( result ) 529 return result 530 # END plug provides output 531 elif self.plug.providesInput( ): # has to be separately checked 532 # check for connection 533 inputshell = self.input() 534 if not inputshell: 535 # check for default value 536 try: 537 return self.plug.attr.default() 538 except ( TypeError, MissingDefaultValueError ),e: 539 raise MissingDefaultValueError( "Plug %r failed to getrieve its default value and is not connected" % repr( self ), e ) 540 # END if we have no input 541 542 # query the connected plug for the value 543 value = inputshell.get( mode ) 544 if self.plug.attr.flags & Attribute.check_passing_values: 545 if not self.plug.attr.compatabilityRate( value ): 546 raise TypeError( "Value coming from input %s is not compatible with %s" % ( str( inputshell ), str( self ) ) ) 547 548 return value 549 # END plug provides input 550 551 raise AssertionError( "Plug %s did not provide any output or input!" % repr( self ) )
552 553 554
555 - def set( self, value, ignore_connection = False ):
556 """Set the given value to be used in our plug 557 :param ignore_connection: if True, the plug can be destination of a connection and 558 will still get its value set - usually it would be overwritten by the value form the 559 connection. The set value will be cleared if something upstream in it's connection chain 560 changes. 561 :raise AssertionError: the respective attribute must be cached, otherwise 562 the value will be lost""" 563 flags = self.plug.attr.flags 564 if flags & Attribute.readonly: 565 raise NotWritableError( "Plug %r is not writable" % repr(self) ) 566 567 if self.plug.providesOutput( ): 568 raise NotWritableError( "Plug %r is not writable as it provides an output itself" % repr(self) ) 569 570 if flags & Attribute.uncached: 571 raise AssertionError( "Writable attributes must be cached - otherwise the value will not be held" ) 572 573 # check connection 574 if not ignore_connection and self.input() is not None: 575 raise NotWritableError( "Plug %r is connected to %r and thus not explicitly writable" % ( self, self.input() ) ) 576 577 self.setCache( value )
578 579
580 - def compatabilityRate( self, value ):
581 """Compute compatability rate for teh given value 582 583 :return: value between 0 and 255, 0 means no compatability, 255 a perfect match 584 if larger than 0, the plug can hold the value ( assumed the flags are set correctly ) 585 """ 586 return self.plug.attr.compatabilityRate( value )
587 588 589 #} END values 590 591 #{ Connections 592
593 - def connect( self, otherplugshell, **kwargs ):
594 """Connect this plug to otherplugshell such that otherplugshell is an input plug for our output 595 596 :param kwargs: everything supported by `Graph.connect` 597 :return: self on success, allows chained connections 598 :raise PlugAlreadyConnected: if otherplugshell is connected and force is False 599 :raise PlugIncompatible: if otherplugshell does not appear to be compatible to this one""" 600 if not isinstance( otherplugshell, _PlugShell ): 601 raise AssertionError( "Invalid Type given to connect: %r" % repr( otherplugshell ) ) 602 603 return self.node.graph.connect( self, otherplugshell, **kwargs )
604 605
606 - def disconnect( self, otherplugshell ):
607 """Remove the connection to otherplugshell if we are connected to it. 608 :note: does not raise if no connection is present""" 609 if not isinstance( otherplugshell, _PlugShell ): 610 raise AssertionError( "Invalid Type given to connect: %r" % repr( otherplugshell ) ) 611 612 return self.node.graph.disconnect( self, otherplugshell )
613
614 - def input( self, predicate = lambda shell: True ):
615 """:return: the connected input plug or None if there is no such connection 616 :param predicate: plug will only be returned if predicate is true for it 617 :note: input plugs have on plug at most, output plugs can have more than one 618 connected plug""" 619 sourceshell = self.node.graph.input( self ) 620 if sourceshell and predicate( sourceshell ): 621 return sourceshell 622 return None
623
624 - def outputs( self, predicate = lambda shell: True ):
625 """:return: a list of plugs being the destination of the connection 626 :param predicate: plug will only be returned if predicate is true for it - shells will be passed in """ 627 return self.node.graph.outputs( self, predicate = predicate )
628
629 - def connections( self, inpt, output, predicate = lambda shell: True ):
630 """:return: get all input and or output connections from this shell 631 or to this shell as edges ( sourceshell, destinationshell ) 632 :param predicate: return true for each destination shell that you can except in the 633 returned edge or the sourceshell where your shell is the destination. 634 :note: Use this method to get edges read for connection/disconnection""" 635 outcons = list() 636 if inpt: 637 sourceshell = self.input( predicate = predicate ) 638 if sourceshell: 639 outcons.append( ( sourceshell, self ) ) 640 # END input connection handling 641 642 if output: 643 outcons.extend( ( self, oshell ) for oshell in self.outputs( predicate = predicate ) ) 644 645 return outcons
646
647 - def isConnected( self ):
648 """:return: True, if the shell is connected as source or as destination of a connection""" 649 return self.input() or self.outputs()
650
651 - def iterShells( self, **kwargs ):
652 """Iterate plugs and their connections starting at this plug 653 :return: generator for plug shells 654 :note: supports all options of `iterShells`, this method allows syntax like: 655 node.outAttribute.iterShells( )""" 656 return iterShells( self, **kwargs )
657 658 #} END connections 659 660 661 #{Caching
662 - def _cachename( self ):
663 return self.plug.name() + "_c"
664
665 - def hasCache( self ):
666 """:return: True if currently store a cached value""" 667 return hasattr( self.node, self._cachename() )
668
669 - def setCache( self, value ):
670 """Set the given value to be stored in our cache 671 :raise: TypeError if the value is not compatible to our defined type""" 672 # attr compatability - always run this as we want to be warned if the compute 673 # method returns a value that does not match 674 if self.plug.attr.compatabilityRate( value ) == 0: 675 raise TypeError( "Plug %r cannot hold value %r as it is not compatible" % ( repr( self ), repr( value ) ) ) 676 677 if self.plug.attr.flags & Attribute.uncached: 678 return 679 680 # our cache changed - dirty downstream plugs - thus clear the cache 681 # NOTE: this clears our own cache by deleting it, but we re-set it 682 self.clearCache( clear_affected = True ) 683 setattr( self.node, self._cachename(), value )
684
685 - def cache( self ):
686 """:return: the cached value or raise 687 :raise ValueError:""" 688 if self.hasCache(): 689 return getattr( self.node, self._cachename() ) 690 691 raise ValueError( "Plug %r did not have a cached value" % repr( self ) )
692
693 - def clearCache( self, clear_affected = False, cleared_shells_set = None ):
694 """Empty the cache of our plug 695 :param clear_affected: if True, the caches of our affected plugs ( connections 696 or affects relations ) will also be cleared 697 This operation is recursive, and needs to be as different shells on different nodes 698 might do things differently. 699 :param cleared_shells_set: if set, it can be used to track which plugs have already been dirtied to 700 prevent recursive loops 701 Propagation will happen even if we do not have a cache to clear ourselves """ 702 if self.hasCache(): 703 del( self.node.__dict__[ self._cachename() ] ) 704 705 if clear_affected: 706 # our cache changed - dirty downstream plugs - thus clear the cache 707 if not cleared_shells_set: # initialize our tracking list 708 cleared_shells_set = set() 709 710 if self in cleared_shells_set: 711 return 712 713 cleared_shells_set.add( self ) # assure we do not come here twice 714 715 all_shells = itertools.chain( self.node.toShells( self.plug.affected() ), self.outputs() ) 716 for shell in all_shells: 717 shell.clearCache( clear_affected = True, cleared_shells_set = cleared_shells_set )
718 # END for each shell in all_shells to clear 719 #} END caching 720 721 722 #{ Name Overrides 723 __rshift__ = lambda self,other: self.connect( other, force=True )
724 # NOTE: this will cause problems when sorting them :) - so lets just use >> for the
725 # forced connection ! 726 # __gt__ = lambda self,other: self.connect( other, force=False ) 727 728 #} END name overrides 729 730 731 732 -class Graph( nx.DiGraph, iDuplicatable ):
733 """Holds the nodes and their connections 734 735 Nodes are kept in a separate list whereas the plug connections are kept 736 in the underlying DiGraph""" 737 738 #{ Overridden Object Methods
739 - def __init__( self, **kwargs ):
740 """initialize the DiGraph and add some additional attributes""" 741 super( Graph, self ).__init__( **kwargs ) 742 self._nodes = set() # our processes from which we can make connections
743
744 - def __del__( self ):
745 """Clear our graph""" 746 self.clear() # clear connections 747 748 # NOTE : nodes will remove themselves once they are not referenced anymore 749 self._nodes.clear()
750 751
752 - def __getattr__( self , attr ):
753 """Allows access to nodes by name just by accessing the graph directly""" 754 try: 755 return self.nodeByID( attr ) 756 except NameError: 757 return super( Graph, self ).__getattribute__( attr )
758 759 #} END object methods 760 761 #{ Debugging
762 - def writeDot( self , fileOrPath ):
763 """Write the connections in self to the given file object or path 764 :todo: remove if no longer needed""" 765 # associate every plugshell with its node create a more native look 766 writegraph = nx.DiGraph() 767 # but we do not use it as the edge attrs cannot be assigned anymore - dict has no unique keys 768 # writegraph.allow_multiedges() 769 770 # EXTRACT DATA 771 for node in self.iterNodes(): 772 writegraph.add_node( node, color="#ebba66", width=4, height=2, fontsize=22 ) 773 # END for each node in graph 774 775 # now all the connections - just transfer them 776 for sshell,eshell in self.edges_iter(): 777 writegraph.add_edge( sshell,eshell ) 778 779 writegraph.add_edge( sshell.node, sshell, color="#000000" ) 780 781 writegraph.add_node( sshell, color="#000000", label=sshell.plug ) 782 writegraph.add_node( eshell, color="#000000", label=eshell.plug ) 783 784 writegraph.add_edge( eshell,eshell.node, color="#000000" ) 785 # END for each edge in graph 786 787 # WRITE DOT FILE 788 nx.write_dot(writegraph, fileOrPath)
789 790 #} END debugging 791 792 #{ iDuplicatable Interface
793 - def createInstance( self ):
794 """Create a copy of self and return it""" 795 return self.__class__( )
796
797 - def copyFrom( self, other ):
798 """Duplicate all data from other graph into this one, create a duplicate 799 of the nodes as well""" 800 def copyshell( shell, nodemap ): 801 nodecpy = nodemap[ shell.node ] 802 803 # nodecpy - just get the shell of the given name directly - getattr always creates 804 # shells as it is equal to node.plugname 805 return getattr( nodecpy, shell.plug.name() )
806 807 # copy name ( networkx ) 808 self.name = other.name 809 810 # copy nodes first 811 nodemap = dict() 812 for node in other.iterNodes(): 813 nodecpy = node.duplicate( add_to_graph = False ) # copy node 814 nodemap[ node ] = nodecpy 815 # END for each node 816 817 # add all nodemap values as nodes ( now that iteration is done 818 for duplnode in nodemap.itervalues(): 819 self.addNode( duplnode ) 820 821 # COPY CONNECTIONS 822 for sshell,eshell in other.edges_iter(): 823 # make fresh connections through shells - we do not know what kind of 824 # plugs they use, so they could be special and thus need special 825 # copy procedures 826 cstart = copyshell( sshell, nodemap ) 827 cend = copyshell( eshell, nodemap ) 828 829 cstart.connect( cend )
830 # END for each edge( startshell, endshell ) 831 832 833 # END iDuplicatable 834 835 836 #{ Node Handling
837 - def addNode( self, node ):
838 """Add a new node instance to the graph 839 :note: node membership is exclusive, thus node instances 840 can only be in one graph at a time 841 :return: self, for chained calls""" 842 if not isinstance( node, NodeBase ): 843 raise TypeError( "Node %r must be of type NodeBase" % node ) 844 845 # assure we do not remove ( and kill connections ) and re-add to ourselves 846 if node in self._nodes: 847 return self 848 849 # remove node from existing graph 850 if node.graph is not None: 851 node.graph.removeNode( node ) 852 853 854 self._nodes.add( node ) # assure the node knows us 855 node.graph = weakref.proxy( self ) 856 857 return self # assure we have the graph set
858
859 - def removeNode( self, node ):
860 """Remove the given node from the graph ( if it exists in it )""" 861 try: 862 # remove connections 863 for sshell, eshell in node.connections( 1, 1 ): 864 self.disconnect( sshell, eshell ) 865 866 # assure the node does not call us anymore 867 node.graph = None 868 self._nodes.remove( node ) 869 except KeyError: 870 pass
871
872 - def clearCache( self ):
873 """Clear the cache of all nodes in the graph - this forces the graph 874 to reevaluate on the next request""" 875 for node in self._nodes: 876 node.clearCache()
877 878 #} END node handling 879 880 #{ Query 881
882 - def hasNode( self , node ):
883 """:return: True if the node is in this graph, false otherwise""" 884 return node in self._nodes
885
886 - def iterNodes( self, predicate = lambda node: True ):
887 """:return: generator returning all nodes in this graph 888 :param predicate: if True for node, it will be returned 889 :note: there is no particular order""" 890 for node in self._nodes: 891 if predicate( node ): 892 yield node
893 # END for each node 894
895 - def iterConnectedNodes( self, predicate = lambda node: True ):
896 """:return: generator returning all nodes that are connected in this graph, 897 in no particular order. 898 For an ordered itereration, use `iterShells`. 899 900 :param predicate: if True for node, it will be returned""" 901 # iterate digraph keeping the plugs only ( and thus connected nodes ) 902 nodes_seen = set() 903 for node,plug in self.nodes_iter(): 904 if node in nodes_seen: 905 continue 906 nodes_seen.add( node ) 907 if predicate( node ): 908 yield node
909 # END for each node 910
911 - def nodes( self ):
912 """:return: immutable copy of the nodes used in the graph""" 913 return tuple( self._nodes )
914
915 - def numNodes( self ):
916 """:return: number of nodes in the graph""" 917 return len( self._nodes )
918
919 - def nodeByID( self, nodeID ):
920 """:return: instance of a node according to the given node id 921 :raise NameError: if no such node exists in graph""" 922 for node in self.iterNodes(): 923 if node.id() == nodeID: 924 return node 925 926 raise NameError( "Node with ID %s not found in graph" % nodeID )
927 928 929 #} END query 930 931 #{ Connections
932 - def connect( self, sourceshell, destinationshell, force = False ):
933 """Connect this plug to destinationshell such that destinationshell is an input plug for our output 934 935 :param sourceshell: PlugShell being source of the connection 936 :param destinationshell: PlugShell being destination of the connection 937 :param force: if False, existing connections to destinationshell will not be broken, but an exception is raised 938 if True, existing connection may be broken 939 :return: self on success, allows chained connections 940 :raise PlugAlreadyConnected: if destinationshell is connected and force is False 941 :raise PlugIncompatible: if destinationshell does not appear to be compatible to this one""" 942 # assure both nodes are known to the graph 943 if not sourceshell.node.graph is destinationshell.node.graph: 944 raise AssertionError( "You cannot connect nodes from different graphs" ) 945 946 self._nodes.add( sourceshell.node ) 947 self._nodes.add( destinationshell.node ) 948 949 # check compatability 950 if sourceshell.plug.attr.connectionAffinity( destinationshell.plug.attr ) == 0: 951 raise PlugIncompatible( "Cannot connect %r to %r as they are incompatible" % ( repr( sourceshell ), repr( destinationshell ) ) ) 952 953 954 oinput = destinationshell.input( ) 955 if oinput is not None: 956 if oinput == sourceshell: 957 return sourceshell 958 959 if not force: 960 raise PlugAlreadyConnected( "Cannot connect %r to %r as it is already connected" % ( repr( sourceshell ), repr( destinationshell ) ) ) 961 962 # break existing one 963 oinput.disconnect( destinationshell ) 964 # END destinationshell already connected 965 966 # connect us 967 self.add_edge( sourceshell, v = destinationshell ) 968 return sourceshell
969
970 - def disconnect( self, sourceshell, destinationshell ):
971 """Remove the connection between sourceshell to destinationshell if they are connected 972 :note: does not raise if no connection is present""" 973 self.remove_edge( sourceshell, v = destinationshell ) 974 975 # also, delete the plugshells if they are not connnected elsewhere 976 for shell in sourceshell,destinationshell: 977 if len( self.neighbors( shell ) ) == 0: 978 self.remove_node( shell )
979
980 - def input( self, plugshell ):
981 """:return: the connected input plug of plugshell or None if there is no such connection 982 :note: input plugs have on plug at most, output plugs can have more than one connected plug""" 983 try: 984 pred = self.predecessors( plugshell ) 985 if pred: 986 return pred[0] 987 except nx.NetworkXError: 988 pass 989 990 return None
991
992 - def outputs( self, plugshell, predicate = lambda x : True ):
993 """:return: a list of plugs being the destination of the connection to plugshell 994 :param predicate: plug will only be returned if predicate is true for it - shells will be passed in """ 995 try: 996 return [ s for s in self.successors( plugshell ) if predicate( s ) ] 997 except nx.NetworkXError: 998 return list()
999
1000 #} END connections 1001 1002 1003 -class _NodeBaseCheckMeta( type ):
1004 """Class checking the consistency of the nodebase class before it is being created"""
1005 - def __new__( metacls, name, bases, clsdict ):
1006 """Check: 1007 - every plugname must correspond to a node member name 1008 """ 1009 newcls = super( _NodeBaseCheckMeta, metacls ).__new__( metacls, name, bases, clsdict ) 1010 1011 # EVERY PLUG NAME MUST MATCH WITH THE ACTUAL NAME IN THE CLASS 1012 # set the name according to its slot name in the parent class 1013 membersdict = inspect.getmembers( newcls ) # do not filter, as plugs could be overridden 1014 try: 1015 if hasattr( newcls, "plugsStatic" ): 1016 for plug in newcls.plugsStatic( ): 1017 for name,member in membersdict: 1018 if member == plug and plug.name() != name: 1019 # try to set it 1020 if hasattr( plug, 'setName' ): 1021 plug.setName( name ) 1022 else: 1023 raise AssertionError( "Plug %r is named %s, but must be named %s as in its class %s" % ( plug, plug.name(), name, newcls ) ) 1024 # END setName special handling 1025 # END if member nanme is wrong 1026 # END for each class member 1027 1028 # ignore plugs we possibly did not find in the physical class 1029 # END for each plug in class 1030 # END if method exists 1031 except TypeError: 1032 # it can be that a subclass overrides this method and makes it an instance method 1033 # this is valid - the rest of the dgengine always accesses this method 1034 # through instance - so we have to handle it 1035 pass 1036 1037 return newcls
1038
1039 1040 1041 1042 -class NodeBase( iDuplicatable ):
1043 """Base class that provides support for plugs to the superclass. 1044 It will create some simple tracking attriubtes required for the plug system 1045 to work 1046 1047 Nodes can compute values of their plugs if these do not have a cache. 1048 1049 Nodes are identified by an ID - the default graph implementation though will 1050 be okay with just having instances. 1051 It is also being used for string representations of this node""" 1052 shellcls = _PlugShell # class used to instantiate new shells 1053 __metaclass__ = _NodeBaseCheckMeta # check the class before its being created 1054 1055 #{ Overridden from Object
1056 - def __init__( self, *args, **kwargs ):
1057 """We require a directed graph to track the connectivity between the plugs. 1058 It must be supplied by the super class and should be as global as required to 1059 connecte the NodeBases together properly. 1060 1061 :param kwargs: 'id' = id of the instance, defaults to None if it is not required 1062 :note: we are super() compatible, and assure our base is initialized correctly""" 1063 self.graph = None 1064 self._id = None 1065 1066 # set id 1067 newid = kwargs.get( 'id', None ) 1068 if newid: 1069 self.setID( newid )
1070
1071 - def __del__( self ):
1072 """Remove ourselves from the graph and delete our connections""" 1073 # check if item does still exist - this is not the case if the graph 1074 # is currently being deleted 1075 try: 1076 #self.graph.removeNode( self ) # TODO: take back in and make it work ! Problems with facade nodes 1077 pass 1078 except (AttributeError,ReferenceError): # .graph could be None 1079 pass
1080
1081 - def __str__( self ):
1082 """Use our id as string or the default implementation""" 1083 if self.id() is not None: 1084 return str( self.id() ) 1085 1086 return super( NodeBase, self ).__str__( )
1087 #} Overridden from Object 1088 1089 #{ iDuplicatable Interface
1090 - def createInstance( self, *args, **kwargs ):
1091 """Create a copy of self and return it 1092 1093 :note: override by subclass - the __init__ methods shuld do the rest""" 1094 return self.__class__( id = self.id() )
1095
1096 - def copyFrom( self, other, add_to_graph = True ):
1097 """Just take the graph from other, but do not ( never ) duplicate it 1098 1099 :param add_to_graph: if true, the new node instance will be added to the graph of 1100 :note: default implementation does not copy plug caches ( which are stored in 1101 the node dict - this is because a reevaluate is usually required on the 1102 duplicated node""" 1103 self.setID( other.id() ) # id copying would create equally named clones for now 1104 if add_to_graph and other.graph: # add ourselves to the graph of the other node 1105 other.graph.addNode( self )
1106 1107 #} END iDuplicatable 1108 1109 #{ Base Interface
1110 - def compute( self, plug, mode ):
1111 """Called whenever a plug needs computation as the value its value is not 1112 cached or marked dirty ( as one of the inputs changed ) 1113 1114 :param plug: the static plug instance that requested which requested the computation. 1115 It is the instance you defined on the class 1116 :param mode: the mode of operation. Its completely up to the superclasses how that 1117 attribute is going to be used 1118 :note: to be implemented by superclass """ 1119 raise NotImplementedError( "To be implemented by subclass" )
1120 1121 #} END base interface 1122 1123 #{ ID Handling
1124 - def setID( self, newID ):
1125 """Set id of this node to newiD 1126 :return: previously assigned id""" 1127 curid = self.id() 1128 self._id = newID 1129 return curid
1130
1131 - def id( self ):
1132 """:return: ID of this instance""" 1133 return self._id
1134 1135 #} END id handling 1136 1137 #{ Base
1138 - def toShells( self, plugs ):
1139 """:return: list of shells made from plugs and our node""" 1140 # may not use it as generator as it binds variables ( of course ! ) 1141 outlist = list() 1142 for plug in plugs: 1143 outlist.append( self.toShell( plug ) ) 1144 return outlist
1145
1146 - def toShell( self, plug ):
1147 """:return: a plugshell as suitable to for this class""" 1148 return getattr( self, 'shellcls' )( self, plug ) # prevent cls variable to be bound !
1149
1150 - def clearCache( self ):
1151 """Clear the cache of all plugs on this node - this basically forces it 1152 to recompute the next time an output plug is being queried""" 1153 for plug in self.plugs( ): 1154 self.toShell( plug ).clearCache( clear_affected = False )
1155 1156 @classmethod
1157 - def plugsStatic( cls, predicate = lambda x: True ):
1158 """:return: list of static plugs as defined on this node - they are class members 1159 :param predicate: return static plug only if predicate is true 1160 :note: Use this method only if you do not have an instance - there are nodes 1161 that actually have no static plug information, but will dynamically generate them. 1162 For this to work, they need an instance - thus the plugs method is an instance 1163 method and is meant to be the most commonly used one.""" 1164 pred = lambda m: isinstance( m, plug ) 1165 1166 # END sanity check 1167 pluggen = ( m[1] for m in inspect.getmembers( cls, predicate = pred ) if predicate( m[1] ) ) 1168 return list( pluggen )
1169
1170 - def plugs( self, predicate = lambda x: True ):
1171 """:return: list of dynamic plugs as defined on this node - they are usually retrieved 1172 on class level, but may be overridden on instance level 1173 :param predicate: return static plug only if predicate is true""" 1174 # the getmembers function appears to be ... buggy with my classes 1175 # use special handling to assure he gets all the instance members AND the class members 1176 # In ipython tests this worked as expected - get the dicts individually 1177 all_dict_holders = itertools.chain( ( self, ), self.__class__.mro() ) 1178 all_dicts = ( instance.__dict__ for instance in all_dict_holders ) 1179 pluggen = ( v for d in all_dicts for v in d.itervalues() if isinstance( v, plug ) and predicate( v ) ) 1180 1181 return list( pluggen )
1182 1183 @classmethod
1184 - def inputPlugsStatic( cls, **kwargs ):
1185 """:return: list of static plugs suitable as input 1186 :note: convenience method""" 1187 return cls.plugsStatic( predicate = lambda p: p.providesInput(), **kwargs )
1188
1189 - def inputPlugs( self, **kwargs ):
1190 """:return: list of plugs suitable as input 1191 :note: convenience method""" 1192 return self.plugs( predicate = lambda p: p.providesInput(), **kwargs )
1193 1194 @classmethod
1195 - def outputPlugsStatic( cls, **kwargs ):
1196 """:return: list of static plugs suitable to deliver output 1197 :note: convenience method""" 1198 return cls.plugsStatic( predicate = lambda p: p.providesOutput(), **kwargs )
1199
1200 - def outputPlugs( self, **kwargs ):
1201 """:return: list of plugs suitable to deliver output 1202 :note: convenience method""" 1203 return self.plugs( predicate = lambda p: p.providesOutput(), **kwargs )
1204
1205 - def connections( self, inpt, output ):
1206 """:return: Tuples of input shells defining a connection of the given type from 1207 tuple( InputNodeOuptutShell, OurNodeInputShell ) for input connections and 1208 tuple( OurNodeOuptutShell, OutputNodeInputShell ) 1209 :param inpt: include input connections to this node 1210 :param output: include output connections ( from this node to others )""" 1211 outConnections = list() 1212 plugs = self.plugs() 1213 # HANDLE INPUT 1214 if inpt: 1215 shells = self.toShells( ( p for p in plugs if p.providesInput() ) ) 1216 for shell in shells: 1217 ishell = shell.input( ) 1218 if ishell: 1219 outConnections.append( ( ishell, shell ) ) 1220 # END for each shell in this node's shells 1221 # END input handling 1222 1223 # HANDLE OUTPUT 1224 if output: 1225 shells = self.toShells( ( p for p in plugs if p.providesOutput() ) ) 1226 for shell in shells: 1227 outConnections.extend( ( ( shell, oshell ) for oshell in shell.outputs() ) ) 1228 # END output handling 1229 1230 return outConnections
1231 1232 @classmethod
1233 - def filterCompatiblePlugs( cls, plugs, attrOrValue, raise_on_ambiguity = False, attr_affinity = False, 1234 attr_as_source=True ):
1235 """:return: sorted list of (rate,plug) tuples suitable to deal with the given attribute. 1236 Thus they could connect to it as well as get their value set. 1237 Most suitable plug comes first. 1238 Incompatible plugs will be pruned. 1239 :param attrOrValue: either an attribute or the value you would like to set to the 1240 attr at the plug in question. 1241 :param raise_on_ambiguity: if True, the method raises if a plug has the same 1242 rating as another plug already on the output list, thus it's not clear anymore 1243 which plug should handle a request 1244 :param attr_affinity: if True, it will not check connection affinity, but attribute 1245 affinity only. It checks how compatible the attributes of the plugs are, disregarding 1246 whether they can be connected or not 1247 Only valid if attrOrValue is an attribute 1248 :param attr_as_source: if True, attrOrValue will be treated as the source of a connection or 1249 each plug would need to take its values. 1250 if False, attrOrValue is the destination of a connection and it needs to take values of the given plugs 1251 or they would connect to it. Only used if attrOrValue is an attribute. 1252 :raise TypeError: if ambiguous input was found""" 1253 1254 attribute = None 1255 value = attrOrValue 1256 if isinstance( attrOrValue, Attribute ): 1257 attribute = attrOrValue 1258 1259 outSorted = list() 1260 for plug in plugs: 1261 1262 if attribute: 1263 sourceattr = attribute 1264 destinationattr = plug.attr 1265 if not attr_as_source: 1266 destinationattr = attribute 1267 sourceattr = plug.attr 1268 1269 if attr_affinity: 1270 rate = destinationattr.affinity( sourceattr ) # how good can dest store source ? 1271 else: 1272 rate = sourceattr.connectionAffinity( destinationattr ) 1273 # END which affinity type 1274 # END attribute rating 1275 else: 1276 rate = plug.attr.compatabilityRate( value ) 1277 # END value rating 1278 1279 if not rate: 1280 continue 1281 1282 outSorted.append( ( rate, plug ) ) 1283 # END for each plug 1284 1285 outSorted.sort() 1286 outSorted.reverse() # high rates first 1287 1288 if raise_on_ambiguity: 1289 ratemap = dict() 1290 for rate,plug in outSorted: 1291 ratemap.setdefault( rate, list() ).append( plug ) 1292 # END for each compatible plug 1293 report = "" 1294 for rate, pluglist in ratemap.iteritems( ): 1295 if len( pluglist ) > 1: 1296 report += "Rate: %i :" % rate 1297 for plug in pluglist: 1298 report += "\n%s" % str(plug) 1299 # END if ambiguous plugs 1300 # END for each rate in ratemap 1301 if report: 1302 report = "Ambiguous plugs found\n" + report 1303 raise TypeError( report ) 1304 # END ambiguous check 1305 1306 return outSorted
1307 1308 #} END base 1309
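To close, a hedged end-to-end sketch of the pieces working together (not part of the module; all names are invented): a node adding its two inputs, evaluated through a graph and connected to a second node.

    class AddNode( NodeBase ):
        inA    = plug( Attribute( float, 0, default = 0.0 ) )
        inB    = plug( Attribute( float, 0, default = 0.0 ) )
        outSum = plug( Attribute( float, 0 ) )

        inA.affects( outSum )
        inB.affects( outSum )

        def compute( self, plug, mode ):
            if plug is self.__class__.outSum:
                return self.inA.get() + self.inB.get()
            raise PlugUnhandled( "Unhandled plug: %s" % plug )

    g = Graph()
    n1 = AddNode( id = "n1" )
    n2 = AddNode( id = "n2" )
    g.addNode( n1 ).addNode( n2 )

    n1.inA.set( 1.0 )
    n1.inB.set( 2.0 )
    assert n1.outSum.get() == 3.0     # computed by AddNode.compute and cached

    n1.outSum >> n2.inA               # forced connection via the __rshift__ override
    assert n2.outSum.get() == 3.0     # n2.inB falls back to its default of 0.0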