Package jsondata :: Module JSONDataSerializer
[hide private]
[frames] | [no frames]

Source Code for Module jsondata.JSONDataSerializer

# -*- coding:utf-8   -*-
"""Basic features for the persistence of JSON based in-memory data.
"""
__author__ = 'Arno-Can Uestuensoez'
__maintainer__ = 'Arno-Can Uestuensoez'
__license__ = "Artistic-License-2.0 + Forced-Fairplay-Constraints"
__copyright__ = "Copyright (C) 2015-2016 Arno-Can Uestuensoez @Ingenieurbuero Arno-Can Uestuensoez"
__version__ = '0.2.14'
__uuid__='63b597d6-4ada-4880-9f99-f5e0961351fb'

import os,sys

# Runtime version gate: accepts Python 2.6/2.7 only.
# NOTE(review): the exception message says "or higher", but the check
# rejects anything outside ('2.6', '2.7') -- message and check disagree;
# confirm which is intended before changing either.
version = '{0}.{1}'.format(*sys.version_info[:2])
if not version in ('2.6','2.7',): # pragma: no cover
    raise Exception("Requires Python-2.6.* or higher")
# if version < '2.7': # pragma: no cover
#     raise Exception("Requires Python-2.7.* or higher")

#
# Check whether the application has selected a verified JSON package.
# Prefer a JSON package the application has already imported, falling
# back to the stdlib 'json'. All module code uses the alias 'myjson'.
# NOTE(review): 'json' is tested before 'ujson', so the 'ujson' branch
# is reached only when 'json' is absent from sys.modules -- verify this
# priority is the intended one.
if sys.modules.get('json'):
    import json as myjson #@UnusedImport
elif sys.modules.get('ujson'):
    import ujson as myjson
else:
    import json as myjson

from jsondata.JSONData import MODE_SCHEMA_OFF,MODE_SCHEMA_DRAFT3,MODE_SCHEMA_DRAFT4
from jsondata.JSONData import MATCH_NO,MATCH_KEY,MATCH_CHLDATTR,MATCH_INDEX,MATCH_MEM

# Sets display for interactive JSON/JSONschema design.
_interactive = False

# generic exceptions for 'jsondata'
from jsondata.JSONDataExceptions import JSONDataException,JSONDataValue,JSONDataSourceFile,JSONDataTargetFile
from jsondata.JSONData import JSONData,JSONDataAmbiguity
class JSONDataSerializer(JSONData):
    """Persistency of JSON based data for the class jsondata.JSONData.

    This class provides for persistency of data managed by
    jsondata.JSONData.

    Attributes:
        **data**: The data tree of JSON based objects provided
            by the module 'json'.
        **schema**: The validator for 'data' provided by
            the module 'jsonschema'.

    Common call parameters provided by the methods of this class are:

        *targetnode := addressreference*
            The target node of the called method. In most cases this
            has to be a reference to a container for the modification
            and/or insertion of resulting elements. The methods require
            the change of contained items, which involves the
            application of a 'key' pointing to the hook in point of
            the reference to the modification.

        *key := key-value*
            The hook-in point for references of modified entries
            within the targetnode container.

        *sourcenode := addressreference*
            The in-memory node address of the source branch for the
            method, e.g. a 'copy' or 'move' operation.

    The address references supported in this class refer to the
    resulting in-memory representation of a pointer path. The target
    is a node within a Python data representation as provided by the
    package '**json**' and compatible packages, e.g. '**ujson**'. The
    supported input syntax is one of the following interchangeable
    formats::

        # The reference to an in-memory-node.
        addressreference := (
            nodereference
            | addressreference-source
        )

        nodereference := (
            <in-memory>
            | ''
        )

        <in-memory> := "Memory representation of a JSON node, a 'dict'
            or a 'list'. The in-memory Python node reference has to be
            located within the document; due to performance reasons
            this is not verified by default.

            The 'nodereference' could be converted from the
            'addressreference-source' representation."

        '' := "Represents the whole document in accordance to RFC6901.
            Same as 'self.data'."

        # The source of the syntax for the description of the
        # reference pointer path to a node. This is applicable on
        # paths to be created.
        addressreference-source := (
            JSONPointer
        )

        JSONPointer := "A JSONPointer object in accordance to RFC6901.
            For additional information on input formats refer to the
            class documentation.
            This class provides a fully qualified path pointer, which
            could be converted into any of the required
            representations."

    For hooks by 'key-value' within addressed containers::

        key-value := (None | <list-index> | <dict-key>)

        None := "When the 'key' parameter is 'None', the action
            optionally could be based on the keys of the 'sourcenode'.
            The contents of the branch replace the node contents
            when the type of the branch matches the hook."

        <list-index> := ('-' | int)

        <dict-key> := "Valid for a 'dict' only, sets a key/value pair,
            where present is replaced, new is created."

        '-' := "Valid for a 'list' only, appends to present."

        int := "Valid for a 'list' only, replaces present when
            0 < #int < len(Node)."

    In the parameter lists of methods the term 'pointer' is either
    an object of class 'JSONPointer', or a list of pointer path
    entries.

    The JSON types 'object' and 'array' behave in Python slightly
    differently in accordance to RFC6902. The main difference arises
    from the restrictions on applicable key values. Whereas the
    ranges are limited logically by the actual container sizes,
    the object types provide free and unlimited keys. The limit
    is set by type restriction to unicode and 'non-nil' only
    for keys.
    """
    def __init__(self,appname,*args,**kargs):
        """Loads and validates a JSON definition with the corresponding schema file.

        Args:
            appname: Name of the application. An arbitrary string
                representing the name of an application. The name is
                mainly used for the default name prefix of the JSON
                data and schema files.

            *args: Optional positional parameters, these replace the
                corresponding keyword parameters in the order:
                filelist, pathlist, filepathlist, schemafile

            **kargs:
                datafile: Filepathname of JSON data file; when provided
                    a further search by pathlist, filelist, and
                    filepathlist is suppressed. Therefore it has to be
                    a valid filepathname.

                    default:= <appname>.json
                filelist: List of valid filenames.

                    default:= <appname>.json
                filepathlist: List of filepathnames. These are not
                    prefixed by search path components, but made
                    absolute.

                    default:= []
                filepriority: [firstonly, lastonly, all]
                    Defines the handling of multiple occurrences of a
                    filename at various positions. This option thus may
                    only be altered in conjunction with 'pathlist'.

                    default:= all
                indent_str: Defines the indentation of 'str'.

                    default:= 4
                interactive: Hints on command line call for optional
                    change of display format.

                    default:= False
                loadcached: Caching of load for JSON data files.
                    Loads either completely into cache before
                    transferring to production entries, or immediately
                    into production parameters, which may take an
                    effect on the remaining parameters to be loaded.

                    default:= False
                nodefaultpath: Ignores the default paths; the exception
                    is the base configuration, which still is searched
                    within the default paths exclusively.

                    default:= False
                nosubdata: Suppresses the load of sub-data files.

                    default:= False
                pathlist: List of pathnames for search of a valid
                    filename. Either a PATH like string, or a list of
                    single paths.

                    default:= ../dirname(__file__)/etc/:dirname(__file__)/:/etc/:$HOME/etc/
                requires: [all, base, one]
                    Defines how to handle missing or invalid files.

                    default:= all
                schema: A valid in-memory JSONschema.

                    default:= None
                schemafile: Filepathname of JSONschema file.

                    default:= <appname>.jsd
                validator: [default, draft3, off, ]
                    Sets schema validator for the data file.
                    The values are: default=validate,
                    draft3=Draft3Validator, off=None

                    default:= validate
                printdata: branch=None
                    Pretty print resulting final data of branch.

                    default:= top
                printschema: branch=None
                    Pretty print resulting schema.

                    default:= top
                debug: Displays extended state data for developers.
                    Requires __debug__==True.
                verbose: Extends the amount of the display of
                    processing data.

        Returns:
            Results in an initialized object.

        Raises:
            NameError:

            JSONDataSourceFile:

            JSONDataAmbiguity:

            JSONDataValue:

            jsonschema.ValidationError:

            jsonschema.SchemaError:

        """
        # Init basic data; temporarily disable validation in the base
        # class so that the schema is resolved by *this* class below,
        # then restore the caller's requested validator mode.
        _validate = kargs.get('validator',False)
        if _validate:
            kargs['validator'] = MODE_SCHEMA_OFF
        JSONData.__init__(self,[],**kargs)
        if _validate:
            kargs['validator'] = _validate

        #
        # static final defaults
        #

        # prep import subcall
        kimp={}

        self.nodefaultpath = False
        self.nodesubdata = False
        self.requires = False

        # Either provided explicitly, or for search.
        self.datafile = None

        afile = os.path.abspath(str(__file__))

        # The internal object schema for the framework - a fixed set of files as final MODE_SCHEMA_DRAFT4.
        self.schemafile = kargs.get('schemafile',None)
        if self.schema and self.schemafile:
            # When a schema/schema file is provided, it is the only and one
            # for the top-level,
            raise JSONDataAmbiguity('schemafile/schema',
                "schemafile="+str(self.schemafile),
                "schema="+str(self.schema)
                )

        self.nodefaultpath = kargs.get('nodefaultpath',False)

        self.pathlist = kargs.get('pathlist','')

        self.filelist = kargs.get('filelist',None)
        if not self.filelist:
            self.filelist = [ appname+'.json', ]

        self.filepathlist = kargs.get('filepathlist',[])

        # positional parameters dominate, remaining are MODE_SCHEMA_DRAFT4
        if args:
            for i in range(0,len(args)):
                if i == 0:
                    self.filelist = args[i]
                elif i == 1:
                    self.pathlist = args[i]
                elif i == 2:
                    self.filepathlist = args[i]
                elif i == 3:
                    self.schemafile = args[i]
                else:
                    raise JSONDataValue("unknown","args["+str(i)+"]",str(args))

        #
        #*** Fetch parameters
        #
        for k,v in kargs.items():
#             if k == 'branch':
#                 self.branch = v
            if k == 'datafile':
                self.datafile = v
            elif k == 'filepathlist':
                self.filepathlist = v
            elif k == 'filepriority':
                self.filepriority = v
            elif k == 'indent_str':
                self.indent_str = v
            elif k == 'loadcached':
                self.loadcached = v
            elif k == 'nodefaultpath':
                self.nodefaultpath = v
            elif k == 'nodesubdata':
                self.nodesubdata = v
            elif k == 'requires':
                self.requires = v
            elif k == 'schemafile':
                self.schemafile = v
            elif k == 'validator':
                self.validator = v

        if __debug__:
            if self.debug:
                print "DBG:self.pathlist=    "+str(self.pathlist)
                print "DBG:self.filelist=    "+str(self.filelist)
                print "DBG:self.filepathlist="+str(self.filepathlist)
                print "DBG:self.schemafile=  "+str(self.schemafile)

        if type(self.pathlist) == list: # a list of single-paths
            if not self.nodefaultpath:
                # Fixed set of data files as final default.
                # NOTE(review): the last three entries are prefixed with
                # os.pathsep (':'), producing paths like ':/etc/' inside a
                # *list* -- looks like a leftover from the string branch
                # below; confirm intent before changing.
                self.pathlist.extend(
                    [os.path.dirname(afile)+os.sep+'etc'+os.sep+appname+os.sep,
                    os.pathsep+os.sep+'etc'+os.sep,
                    os.pathsep+"$HOME"+os.sep+'etc'+os.sep,
                    os.pathsep+os.path.dirname(__file__)+os.sep,
                    ])

            # expand all
            self.pathlist = [os.path.expandvars(os.path.expanduser(p)) for p in self.pathlist]

        else: # a PATH like variable, so do it at once
            if not self.nodefaultpath:
                # Fixed set of data files as the final default.
                self.pathlist += os.path.dirname(afile)+os.sep+'etc'+os.sep+appname+os.sep+os.pathsep+os.sep+'etc'+os.sep+os.pathsep+"$HOME"+os.sep+'etc'+os.sep+os.pathsep+os.path.dirname(__file__)+os.sep
            self.pathlist = os.path.expandvars(os.path.expanduser(self.pathlist)).split(os.pathsep)

        # canonical
        self.pathlist = [os.path.realpath(os.path.abspath(p))+os.sep for p in self.pathlist]

        if not self.datafile: # No explicit given
            if self.filelist:
                # NOTE(review): 'self.filelist' is mutated (remove) while
                # being iterated -- entries following a removed one may be
                # skipped; confirm whether this is relied upon.
                for f in self.filelist:
                    if os.path.isabs(f):
                        self.filepathlist.append(f)
                        self.filelist.remove(f)
                    else:
                        for p in self.pathlist:
                            fx=p+os.sep+f
                            if os.path.isfile(fx):
                                self.filepathlist.append(fx)
                                if f in self.filelist: # could occur under multiple paths
                                    self.filelist.remove(f)

        elif not os.path.isfile(self.datafile): # a provided datafile has to exist
            raise JSONDataSourceFile("open","datafile",str(self.datafile))

        if not self.filepathlist:
            if not self.datafile:
                raise JSONDataSourceFile("value","datasource",str(self.filelist)+":"+str(self.pathlist))

        # Check whether validation is requested.
        # If so, do a last trial for plausible construction.
        if not self.schema and self.validator != MODE_SCHEMA_OFF:
            # require schema for validation, no schema provided, now-than...
            if not self.schemafile: # do we have a file
                if self.datafile:
                    if os.path.isfile(os.path.splitext(self.datafile)[0]+'.jsd'): # coallocated pair - datafile+schemafile
                        self.schemafile = os.path.splitext(self.datafile)[0]+'.jsd'
                elif self.filepathlist: # search, use the first found
                    for f in self.filepathlist:
                        if os.path.isfile(f) and os.path.isfile(os.path.splitext(f)[0]+".jsd"):
                            self.schemafile = os.path.splitext(f)[0]+".jsd"
                            break # just use the first valid-pair
                    # NOTE(review): this raise is reached even after a
                    # successful 'break' above -- looks like it should only
                    # fire when no co-located '.jsd' was found; confirm the
                    # intended indentation/guard.
                    raise JSONDataSourceFile("open","schemafile",str(self.filepathlist))
                else:
                    raise JSONDataSourceFile("value","datasource",str(self.filelist)+":"+str(self.pathlist))

            # when defined => has to be present
            if self.schemafile:
                if not os.path.isfile(self.schemafile):
                    raise JSONDataSourceFile("open","schemafile",str(self.schemafile))

                # initialize schema
                kargs['schemafile'] = self.schemafile
                self.setSchema(**kargs)

        if __debug__:
            if self.debug:
                print "DBG:self.pathlist=    "+str(self.pathlist)
                print "DBG:self.filelist=    "+str(self.filelist)
                print "DBG:self.filepathlist="+str(self.filepathlist)
                print "DBG:self.schemafile=  "+str(self.schemafile)

        #
        # load data, therefore search data files within pathlist
        #
        confok=False
        onenok = False
        if not self.datafile: # No explicit given
            if self.filepathlist:
                for f in self.filepathlist:
                    if self.json_import(self.branch,None,f,self.schemafile,**kimp):
                        confok=True
                    else:
                        onenok = True

                if not confok: # base loaded only
                    # NOTE(review): 'if not self.requires' followed by
                    # 'self.requires == "base"' is contradictory -- when
                    # 'requires' is falsy it can never equal 'base'; the
                    # outer guard probably should be 'if self.requires'.
                    if not self.requires: # there is a rule
                        if self.requires == 'base': # is mandatory, reaching this means is OK
                            pass
                        else:
                            raise JSONDataSourceFile("value","datasource",str(self.filepathlist)+":"+str(self.filelist)+":"+str(self.pathlist))

                else: # at least one application configuration loaded
                    if self.requires != False: # there is a rule
                        if self.requires == 'all': # no exeception allowed
                            if onenok: # one has failed
                                raise JSONDataSourceFile("value","datasource",str(self.filepathlist)+":"+str(self.filelist)+":"+str(self.pathlist))
                        elif self.requires == 'base': # is mandatory, reaching this means is OK
                            pass
                        elif self.requires == 'one': # reaching this means is OK
                            pass

        else:
            if os.path.exists(self.datafile):
                if not self.schemafile and self.schema:
                    kimp['schema'] = self.schema
                self.json_import(self.branch, None, self.datafile, self.schemafile,**kimp)
443 - def json_export(self, sourcenode, fname, **kargs):
444 """ Exports current data for later import. 445 446 The exported data is a snapshot of current state. 447 448 Args: 449 fname: File name for the exported data. 450 451 sourcenode: Base of sub-tree for export. 452 None for complete JSON document. 453 454 **kargs: 455 ffs. 456 457 Returns: 458 When successful returns 'True', else returns either 'False', 459 or raises an exception. 460 461 Raises: 462 JSONDataTargetFile: 463 """ 464 if not sourcenode: 465 sourcenode = self.data 466 try: 467 with open(fname, 'w') as fp: 468 #ret = 469 myjson.dump(sourcenode, fp) 470 except Exception as e: 471 raise JSONDataTargetFile("open-"+str(e),"data.dump",str(fname)) 472 return True
473
    def json_import(self, targetnode, key, datafile, schemafile=None, **kargs):
        """ Imports and validates JSON based data.

        The contained data in 'datafile' could be either the initial data
        tree, or a new branch defined by a fresh tree structure. The
        'targetnode' defines the parent container where the new branch has
        to be hooked-in.

        Args:
            targetnode:
                Target container for the inclusion of the loaded branch.
                For the default:='None' the 'self.data' is used.
            key:
                The hook within the targetnode.
            datafile:
                JSON data filename containing the subtree for the target
                branch.
            schemafile:
                JSON-Schema filename for validation of the subtree/branch.
            **kargs:
                matchcondition:
                    Defines the criteria for comparison of present child
                    nodes in the target container. The value is a list of
                    criteria combined by logical AND. The criteria may
                    vary due to the requirement and the type of applied
                    container.

                    For information on applicable values refer to:
                        'JSONDataSerializer.isApplicable()'

                validator: [default, draft3, off, ]
                    Sets schema validator for the data file.
                    The values are: default=validate,
                    draft3=Draft3Validator, off=None.

                    default:= validate

        Returns:
            When successful returns 'True', else returns either 'False',
            or raises an exception.

        Raises:
            JSONData:

            JSONDataValue:

            JSONDataSourceFile:

        """
        if self.verbose:
            print "VERB:json_import:datafile=   "+str(datafile)
            print "VERB:json_import:schemafile= "+str(schemafile)

        jval = None
        sval = None
        matchcondition = []

        #
        #*** Fetch parameters
        #
        validator = self.validator # use class settings as MODE_SCHEMA_DRAFT4
        for k,v in kargs.items():
            if k == 'matchcondition':
                # For now just passed through to self.isApplicable();
                # accepts either the symbolic string or the MATCH_* constant.
                if v == 'key' or v == MATCH_KEY:
                    matchcondition.append(MATCH_KEY)
                elif v == 'no' or v == MATCH_NO:
                    matchcondition.append(MATCH_NO)
                elif v == 'child_attr_list' or v == MATCH_CHLDATTR:
                    matchcondition.append(MATCH_CHLDATTR)
                elif v == 'index' or v == MATCH_INDEX:
                    matchcondition.append(MATCH_INDEX)
                elif v == 'mem' or v == MATCH_MEM:
                    matchcondition.append(MATCH_MEM)
                else:
                    raise JSONDataValue(k,str(v))
            elif k == 'validator': # controls validation by JSONschema
                if v == 'default' or v == MODE_SCHEMA_DRAFT4:
                    validator = MODE_SCHEMA_DRAFT4
                elif v == 'draft3' or v == MODE_SCHEMA_DRAFT3:
                    validator = MODE_SCHEMA_DRAFT3
                elif v == 'off' or v == MODE_SCHEMA_OFF:
                    validator = MODE_SCHEMA_OFF
                else:
                    raise JSONDataValue("unknown",k,str(v))
            elif k == 'schema':
                # In-memory schema takes the place of a schema file.
                sval = v

        # INPUT-BRANCH: schema for validation
        if validator != MODE_SCHEMA_OFF: # validation requested, requires schema
            if not schemafile: # no new import, use present data
                if not self.schema: # no schema data present
                    raise JSONDataException("value","schema",self.schema)
            else:
                schemafile = os.path.abspath(schemafile)
                if not os.path.isfile(schemafile):
                    raise JSONDataSourceFile("open","schemafile",str(schemafile))
                with open(schemafile) as schema_file:
                    sval = myjson.load(schema_file)
                if not sval:
                    raise JSONDataSourceFile("read","schemafile",str(schemafile))

        # INPUT-BRANCH: data
        datafile = os.path.abspath(datafile)
        if not os.path.isfile(datafile):
            raise JSONDataSourceFile("open","datafile",str(datafile))
        try:
            with open(datafile) as data_file: # load data
                jval = myjson.load(data_file)
        except Exception as e:
            raise JSONDataSourceFile("open","datafile",str(datafile),str(e))
        # NOTE(review): 'if not jval' also rejects documents whose top
        # level is a legitimately empty/falsy value ({} , [], 0, "");
        # confirm this strictness is intended.
        if not jval:
            raise JSONDataSourceFile("read","datafile",str(datafile))

        # INPUT-BRANCH: validate data
        self.validate(jval,sval,validator)

        #
        # TARGET-CONTAINER: manage new branch data
        #
        if not targetnode: # use defaults
            if not self.data: # the initial load, thus OK in any case
                self.data = jval
            targetnode = self.data
            ret = jval != None
        else: # data history present, so decide how to handle

            # Checks that the branch fits into the target container
            if not self.isApplicable(targetnode, key, jval):
                return False

            ret = self.branch_add(targetnode,key,jval)

        return ret # jval != None
607 - def printData(self, pretty=True, **kargs):
608 """Prints structured data. 609 610 Args: 611 pretty: Activates pretty printer for treeview, else flat. 612 613 sourcefile: Loads data from 'sourcefile' into 'source'. 614 615 default:=None 616 source: Prints data within 'source'. 617 618 default:=self.data 619 620 Returns: 621 When successful returns 'True', else returns either 'False', or 622 raises an exception. 623 624 Raises: 625 JSONDataAmbiguity: 626 627 forwarded from 'json' 628 629 """ 630 source = kargs.get('source',None) 631 sourcefile = kargs.get('sourcefile',None) 632 if sourcefile and source: 633 raise JSONDataAmbiguity('sourcefile/source', 634 "sourcefile="+str(sourcefile), 635 "source="+str(source) 636 ) 637 if sourcefile: 638 source = open(sourcefile) 639 source = myjson.load(source) 640 elif not source: 641 source = self.data # yes, almost the same... 642 643 if pretty: 644 print myjson.dumps(source,indent=self.indent) 645 else: 646 print myjson.dumps(source)
647
648 - def printSchema(self, pretty=True, **kargs):
649 """Prints structured schema. 650 651 Args: 652 pretty: Activates pretty printer for treeview, else flat. 653 654 sourcefile: Loads schema from 'sourcefile' into 'source'. 655 656 default:=None 657 source: Prints schema within 'source'. 658 659 default:=self.schema 660 661 Returns: 662 When successful returns 'True', else returns either 'False', or 663 raises an exception. 664 665 Raises: 666 JSONDataAmbiguity: 667 668 forwarded from 'json' 669 670 """ 671 source = kargs.get('source',None) 672 sourcefile = kargs.get('sourcefile',None) 673 if sourcefile and source: 674 raise JSONDataAmbiguity('sourcefile/source', 675 "sourcefile="+str(sourcefile), 676 "source="+str(source) 677 ) 678 if sourcefile: 679 source = open(sourcefile) 680 source = myjson.load(source) 681 elif not source: 682 source = self.schema # yes, almost the same... 683 684 if pretty: 685 print myjson.dumps(source,indent=self.indent) 686 else: 687 print myjson.dumps(source)
688
    def setSchema(self,schemafile=None, targetnode=None, **kargs):
        """Sets schema or inserts a new branch into the current assigned schema.

        The main schema (targetnode==None) is the schema related to the
        current instance. Additional branches could be added by importing
        the specific schema definitions into the main schema. These could
        either be kept volatile as a temporary runtime extension, or
        stored into a new schema file in order as extension of the
        original for later combined reuse.

        Args:
            schemafile:
                JSON-Schema filename for validation of the subtree/branch.
                See also **kargs['schema'].
            targetnode:
                Target container hook for the inclusion of the loaded
                branch.
            **kargs:
                schema:
                    In-memory JSON-Schema as an alternative to schemafile.
                    When provided the 'schemafile' is ignored.

                    default:=None
                validator: [default, draft3, off, ]
                    Sets schema validator for the data file.
                    The values are: default=validate,
                    draft3=Draft3Validator, off=None.

                    default:= validate
                persistent:
                    Stores the 'schema' persistently into 'schemafile'
                    after completion of update including addition of
                    branches. Requires valid 'schemafile'.

                    default:=False

        Returns:
            When successful returns 'True', else returns either 'False',
            or raises an exception.

        Raises:

            JSONData:

            JSONDataSourceFile:

            JSONDataValue:

        """
        if __debug__:
            if self.debug:
                print "DBG:setSchema:schemafile="+str(schemafile)

        #
        #*** Fetch parameters
        #
        datafile = None
        validator = self.validator # use class settings as MODE_SCHEMA_DRAFT4
        persistent = False
        schema = None
        for k,v in kargs.items():
            if k == 'validator': # controls validation by JSONschema
                if v == 'default' or v == MODE_SCHEMA_DRAFT4:
                    validator = MODE_SCHEMA_DRAFT4
                elif v == 'draft3' or v == MODE_SCHEMA_DRAFT3:
                    validator = MODE_SCHEMA_DRAFT3
                elif v == 'off' or v == MODE_SCHEMA_OFF:
                    validator = MODE_SCHEMA_OFF
                else:
                    raise JSONDataValue("unknown",k,str(v))
            elif k == 'schema':
                schema = v
            elif k == 'datafile':
                datafile = v
            elif k == 'persistent':
                persistent = v

        # Resolve the effective schema filename: explicit parameter wins,
        # then the instance's current file, then a '.jsd' co-located with
        # the data file.
        if schemafile != None: # change filename
            self.schemafile = schemafile
        elif self.schemafile != None: # use present
            schemafile = self.schemafile
        elif datafile != None: # derive coallocated from config
            # NOTE(review): this branch tests the *local* 'datafile' but
            # derives the name from 'self.datafile', which may be None
            # here -- looks like it should use the local 'datafile';
            # confirm before changing.
            schemafile = os.path.splitext(self.datafile)[0]+'.jsd'
            if not os.path.isfile(schemafile):
                schemafile = None
            else:
                self.schemafile = schemafile

        if not schemafile:
            if persistent: # persistence requires storage
                raise JSONDataTargetFile("open","JSONSchemaFilename",schemafile)

        # schema for validation
        if schema: # use loaded
            pass

        elif schemafile: # load from file
            schemafile = os.path.abspath(schemafile)
            self.schemafile = schemafile
            if not os.path.isfile(schemafile):
                raise JSONDataSourceFile("open","schemafile",str(schemafile))
            with open(schemafile) as schema_file:
                schema = myjson.load(schema_file)
            if schema == None:
                raise JSONDataSourceFile("read","schemafile",str(schemafile))

        else: # missing at all
            raise JSONDataSourceFile("open","schemafile",str(schemafile))
            pass

        #
        # manage new branch data
        #
        if not targetnode:
            self.schema = schema

        else: # data history present, so decide how to handle

            # the container hook has to match for insertion-
            if type(targetnode) != type(schema):
                raise JSONDataException("type","target!=branch",str(type(targetnode))+"!="+str(type(schema)))

            self.branch_add(targetnode,schema)

        return schema != None
812 813 from jsondata.JSONPointer import JSONPointer 814 # avoid nested recursion problems 815