
Source Code for Module PyFoam.RunDictionary.ParsedParameterFile

#  ICE Revision: $Id: ParsedParameterFile.py 10078 2009-03-02 18:34:41Z bgschaid $
"""Parameter file is read into memory and modified there"""

from FileBasis import FileBasisBackup
from PyFoam.Basics.PlyParser import PlyParser
from PyFoam.Basics.FoamFileGenerator import FoamFileGenerator

from PyFoam.Basics.DataStructures import Vector,Field,Dimension,DictProxy,TupleProxy,Tensor,SymmTensor,Unparsed,UnparsedList

from PyFoam.Error import error

from os import path
from copy import deepcopy

class ParsedParameterFile(FileBasisBackup):
    """ Parameterfile whose complete representation is read into
    memory, can be manipulated and afterwards written to disk"""

    def __init__(self,
                 name,
                 backup=False,
                 debug=False,
                 boundaryDict=False,
                 listDict=False,
                 listDictWithHeader=False,
                 listLengthUnparsed=None,
                 noHeader=False,
                 noBody=False,
                 doMacroExpansion=False,
                 dontRead=False):
        """@param name: The name of the parameter file
        @param backup: create a backup-copy of the file
        @param boundaryDict: the file to parse is a boundary file
        @param listDict: the file only contains a list
        @param listDictWithHeader: the file only contains a list and a header
        @param listLengthUnparsed: Lists longer than that length are not parsed
        @param noHeader: don't expect a header
        @param noBody: don't read the body of the file (only the header)
        @param doMacroExpansion: expand #include and $var
        @param dontRead: Do not read the file during construction
        """

        self.noHeader=noHeader
        self.noBody=noBody
        FileBasisBackup.__init__(self,name,backup=backup)
        self.debug=debug
        self.boundaryDict=boundaryDict
        self.listDict=listDict
        self.listDictWithHeader=listDictWithHeader
        self.listLengthUnparsed=listLengthUnparsed
        self.doMacros=doMacroExpansion

        self.header=None
        self.content=None

        if not dontRead:
            self.readFile()

    def parse(self,content):
        """Constructs a representation of the file"""
        parser=FoamFileParser(content,
                              debug=self.debug,
                              fName=self.name,
                              boundaryDict=self.boundaryDict,
                              listDict=self.listDict,
                              listDictWithHeader=self.listDictWithHeader,
                              listLengthUnparsed=self.listLengthUnparsed,
                              noHeader=self.noHeader,
                              noBody=self.noBody,
                              doMacroExpansion=self.doMacros)

        self.content=parser.getData()
        self.header=parser.getHeader()
        return self.content

    def __contains__(self,key):
        return key in self.content

    def __getitem__(self,key):
        return self.content[key]

    def __setitem__(self,key,value):
        self.content[key]=value

    def __delitem__(self,key):
        del self.content[key]

    def __len__(self):
        return len(self.content)

    def __iter__(self):
        for key in self.content:
            yield key

    def __str__(self):
        """Generates a string from the contents in memory
        Used to be called makeString"""

        string="// -*- C++ -*-\n// File generated by PyFoam - sorry for the ugliness\n\n"

        generator=FoamFileGenerator(self.content,header=self.header)
        string+=generator.makeString(firstLevel=True)

        return string
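
# Illustrative usage sketch (editor's addition, not part of the original module):
# a minimal read-modify-write round trip, assuming a case dictionary such as
# "system/controlDict" exists and that writeFile() is available from the
# FileBasis base class (an assumption about that class's interface):
#
#     ctrl=ParsedParameterFile("system/controlDict",backup=True)
#     ctrl["endTime"]=1000                 # plain dictionary-style access
#     ctrl["writeInterval"]=50
#     ctrl.writeFile()                     # write the modified content back (assumed FileBasis API)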

class WriteParameterFile(ParsedParameterFile):
    """A specialization that is used to only write to the file"""

    def __init__(self,
                 name,
                 backup=False,
                 className="dictionary",
                 objectName=None):
        ParsedParameterFile.__init__(self,
                                     name,
                                     backup=backup,
                                     dontRead=True)

        if objectName==None:
            objectName=path.basename(name)

        self.content={}
        self.header={"version":"2.0",
                     "format":"ascii",
                     "class":className,
                     "object":objectName}
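
# Illustrative usage sketch (editor's addition): creating a dictionary file
# from scratch; the file name is hypothetical and writeFile() from the
# FileBasis base class is assumed to do the actual writing:
#
#     dec=WriteParameterFile("system/decomposeParDict")
#     dec["numberOfSubdomains"]=4
#     dec["method"]="simple"
#     dec.writeFile()                      # assumed FileBasis API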

class FoamFileParser(PlyParser):
    """Class that parses a string that contains the contents of an
    OpenFOAM-file and builds a nested structure of dictionaries and
    lists from it"""

    def __init__(self,
                 content,
                 fName=None,
                 debug=False,
                 noHeader=False,
                 noBody=False,
                 doMacroExpansion=False,
                 boundaryDict=False,
                 preserveComments=True,
                 preserveNewlines=True,
                 listDict=False,
                 listDictWithHeader=False,
                 listLengthUnparsed=None):
        """@param content: the string to be parsed
        @param fName: Name of the actual file (if any)
        @param debug: output debug information during parsing
        @param noHeader: switch that turns off the parsing of the header"""

        self.fName=fName
        self.data=None
        self.header=None
        self.debug=debug
        self.listLengthUnparsed=listLengthUnparsed
        self.doMacros=doMacroExpansion
        self.preserveComments=preserveComments
        self.preserveNewLines=preserveNewlines

        self.collectDecorations=False

        self._decorationBuffer=""

        startCnt=0

        if noBody:
            self.start='noBody'
            startCnt+=1

        if noHeader:
            self.start='noHeader'
            startCnt+=1

        if listDict:
            self.start='pureList'
            startCnt+=1

        if listDictWithHeader:
            self.start='pureListWithHeader'
            startCnt+=1

        if boundaryDict:
            self.start='boundaryDict'
            startCnt+=1

        if startCnt>1:
            error("Only one start symbol can be specified.",startCnt,"are specified")

        PlyParser.__init__(self,debug=debug)

        #sys.setrecursionlimit(50000)
        #print sys.getrecursionlimit()

        self.emptyCnt=0

        self.temp=None
        self.rootDict=True

        self.header,self.data=self.parse(content)
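
    # Illustrative note (editor's addition): the constructor flags above select
    # the grammar start symbol, so one parser handles several file layouts. When
    # no flag is set, PLY falls back to the first rule, 'global' (header + body).
    # Sketch of the mapping:
    #
    #     FoamFileParser(txt)                      # 'global'            : header + dictionary body
    #     FoamFileParser(txt,noHeader=True)        # 'noHeader'          : body only
    #     FoamFileParser(txt,noBody=True)          # 'noBody'            : header only
    #     FoamFileParser(txt,listDict=True)        # 'pureList'          : a bare list
    #     FoamFileParser(txt,listDictWithHeader=True)  # 'pureListWithHeader'
    #     FoamFileParser(txt,boundaryDict=True)    # 'boundaryDict'      : polyMesh/boundary layout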
    def __contains__(self,key):
        return key in self.data

    def __getitem__(self,key):
        return self.data[key]

    def __setitem__(self,key,value):
        self.data[key]=value

    def __delitem__(self,key):
        del self.data[key]

##    def __len__(self):
##        if self.data==None:
##            return 0
##        else:
##            return len(self.data)

    def resetDecoration(self):
        self._decorationBuffer=""

    def addToDecoration(self,text):
        if self.collectDecorations:
            self._decorationBuffer+=text

    def addCommentToDecorations(self,text):
        if self.preserveComments:
            self.addToDecoration(text)

    def addNewlinesToDecorations(self,text):
        if self.preserveNewLines:
            self.addToDecoration(text)

    def getDecoration(self):
        tmp=self._decorationBuffer
        self.resetDecoration()
        if len(tmp)>0:
            if tmp[-1]=='\n':
                tmp=tmp[:-1]
        return tmp

    def directory(self):
        if self.fName==None:
            return path.curdir
        else:
            return path.dirname(self.fName)

    def getData(self):
        """ Get the data structure"""
        return self.data

    def getHeader(self):
        """ Get the OpenFOAM-header"""
        return self.header

    def printContext(self,c,ind):
        """Prints the context of the current index"""
        print "------"
        print c[max(0,ind-100):max(0,ind-1)]
        print "------"
        print ">",c[ind-1],"<"
        print "------"
        print c[min(len(c),ind):min(len(c),ind+100)]
        print "------"

    def parserError(self,text,c,ind):
        """Prints the error message of the parser and exit"""
        print "PARSER ERROR:",text
        print "On index",ind
        self.printContext(c,ind)
        raise PyFoamParserError("Unspecified")

    tokens = (
        'NAME',
        'ICONST',
        'FCONST',
        'SCONST',
        'FOAMFILE',
        'UNIFORM',
        'NONUNIFORM',
        'UNPARSEDCHUNK',
        'REACTION',
        'SUBSTITUTION',
        'MERGE',
        'OVERWRITE',
        'ERROR',
        'DEFAULT',
        'INCLUDE',
        'REMOVE',
        'INPUTMODE',
        'KANALGITTER',
    )

    reserved = {
        'FoamFile'   : 'FOAMFILE',
        'uniform'    : 'UNIFORM',
        'nonuniform' : 'NONUNIFORM',
        'include'    : 'INCLUDE',
        'remove'     : 'REMOVE',
        'inputMode'  : 'INPUTMODE',
        'merge'      : 'MERGE',
        'overwrite'  : 'OVERWRITE',
        'error'      : 'ERROR',
        'default'    : 'DEFAULT',
    }

    states = (
        ('unparsed', 'exclusive'),
    )
    def t_unparsed_left(self,t):
        r'\('
        t.lexer.level+=1
        # print "left",t.lexer.level,

    def t_unparsed_right(self,t):
        r'\)'
        t.lexer.level-=1
        # print "right",t.lexer.level,
        if t.lexer.level < 0 :
            t.value = t.lexer.lexdata[t.lexer.code_start:t.lexer.lexpos-1]
            # print t.value
            t.lexer.lexpos-=1
            t.type = "UNPARSEDCHUNK"
            t.lexer.lineno += t.value.count('\n')
            t.lexer.begin('INITIAL')
            return t

    t_unparsed_ignore = ' \t\n0123456789.-+e'

    def t_unparsed_error(self,t):
        print "Error",t.lexer.lexdata[t.lexer.lexpos]
        t.lexer.skip(1)

    def t_NAME(self,t):
        r'[a-zA-Z_][+\-<>(),.\*|a-zA-Z_0-9&%:]*'
        t.type=self.reserved.get(t.value,'NAME')
        if t.value[-1]==")":
            if t.value.count(")")>t.value.count("("):
                # Give back the last ) because it probably belongs to a list
                t.value=t.value[:-1]
                t.lexer.lexpos-=1

        return t

    def t_SUBSTITUITION(self,t):
        r'\$[a-zA-Z_][+\-<>(),.\*|a-zA-Z_0-9&%:]*'
        t.type=self.reserved.get(t.value,'SUBSTITUTION')
        if t.value[-1]==")":
            if t.value.count(")")>t.value.count("("):
                # Give back the last ) because it probably belongs to a list
                t.value=t.value[:-1]
                t.lexer.lexpos-=1

        return t

    t_KANALGITTER = r'\#'

    t_ICONST = r'(-|)\d+([uU]|[lL]|[uU][lL]|[lL][uU])?'

    t_FCONST = r'(-|)((\d+)(\.\d*)(e(\+|-)?(\d+))? | (\d+)e(\+|-)?(\d+))([lL]|[fF])?'

    t_SCONST = r'\"([^\\\n]|(\\.))*?\"'

    literals = "(){};[]"

    t_ignore=" \t\r"

    # Define a rule so we can track line numbers
    def t_newline(self,t):
        r'\n+'
        t.lexer.lineno += len(t.value)
        now=t.lexer.lexpos
        next=t.lexer.lexdata.find('\n',now)
        if next>=0:
            line=t.lexer.lexdata[now:next]
            pos=line.find("=")
            if pos>=0:
                if ((line.find("//")>=0 and line.find("//")<pos)) or (line.find("/*")>=0 and line.find("/*")<pos) or (line.find('"')>=0 and line.find('"')<pos):
                    return
                t.value = line
                t.type = "REACTION"
                t.lexer.lineno += 1
                t.lexer.lexpos = next
                return t
        # self.addNewlinesToDecorations(t.value)

    # C or C++ comment (ignore)
    def t_ccode_comment(self,t):
        r'(/\*(.|\n)*?\*/)|(//.*)'
        t.lexer.lineno += t.value.count('\n')
        self.addCommentToDecorations(t.value)
        pass

    # Error handling rule
    def t_error(self,t):
        print "Illegal character '%s'" % t.value[0]
        t.lexer.skip(1)
    def p_global(self,p):
        'global : header clearTemp dictbody'
        p[0] = ( p[1] , p[3] )

    def p_clearTemp(self,p):
        'clearTemp :'
        self.collectDecorations=True
        self.rootDict=True
        self.temp=None

    def p_gotHeader(self,p):
        'gotHeader :'
        p.lexer.lexpos=len(p.lexer.lexdata)

    def p_noBody(self,p):
        ''' noBody : FOAMFILE '{' dictbody gotHeader '}' '''
        p[0] = ( p[3] , {} )

    def p_noHeader(self,p):
        'noHeader : dictbody'
        p[0] = ( None , p[1] )

    def p_pureList(self,p):
        'pureList : list'
        p[0] = ( None , p[1] )

    def p_pureListWithHeader(self,p):
        '''pureListWithHeader : header list
                              | header prelist '''
        p[0] = ( p[1] , p[2] )

    def p_boundaryDict(self,p):
        '''boundaryDict : header list
                        | header prelist '''
        # p[0] = ( p[1] , dict(zip(p[2][::2],p[2][1::2])) )
        p[0] = ( p[1] , p[2] )

    def p_header(self,p):
        'header : FOAMFILE dictionary'
        p[0] = p[2]

    def p_macro(self,p):
        '''macro : KANALGITTER include
                 | KANALGITTER inputMode
                 | KANALGITTER remove'''
        p[0] = p[1]+p[2]+"\n"
        if self.doMacros:
            p[0]="// "+p[0]

    def p_include(self,p):
        '''include : INCLUDE SCONST'''
        if self.doMacros:
            fName=path.join(self.directory(),p[2][1:-1])
            data=ParsedParameterFile(fName,noHeader=True)
            if self.temp==None:
                self.temp=DictProxy()
            for k in data:
                self.temp[k]=data[k]

        p[0] = p[1] + " " + p[2]

    def p_inputMode(self,p):
        '''inputMode : INPUTMODE ERROR
                     | INPUTMODE DEFAULT
                     | INPUTMODE MERGE
                     | INPUTMODE OVERWRITE'''
        p[0] = p[1] + " " + p[2]

    def p_remove(self,p):
        '''remove : REMOVE word
                  | REMOVE wlist'''
        p[0] = p[1] + " "
        if type(p[2])==str:
            p[0]+=p[2]
        else:
            p[0]+="( "
            for w in p[2]:
                p[0]+=w+" "
            p[0]+=")"

    def p_integer(self,p):
        '''integer : ICONST'''
        p[0] = int(p[1])

    def p_float(self,p):
        '''integer : FCONST'''
        p[0] = float(p[1])

    def p_dictionary(self,p):
        '''dictionary : '{' dictbody '}'
                      | '{' '}' '''
        if len(p)==4:
            p[0] = p[2]
        else:
            p[0] = DictProxy()

    def p_dictbody(self,p):
        '''dictbody : dictbody dictline
                    | dictline
                    | empty'''

        if len(p)==3:
            p[0]=p[1]
            p[0][p[2][0]]=p[2][1]
            p[0].addDecoration(p[2][0],self.getDecoration())
        else:
            p[0]=DictProxy()

            if self.temp==None:
                self.temp=p[0]
            elif self.rootDict:
                for k,v in self.temp.iteritems():
                    if type(k)!=int:
                        p[0][k]=v
                    else:
                        p[0][self.emptyCnt]=v
                        self.emptyCnt+=1

                self.temp=p[0]

            self.rootDict=False

            if p[1]:
                p[0][p[1][0]]=p[1][1]

    def p_list(self,p):
        '''list : '(' itemlist ')' '''
        p[0] = p[2]
        if len(p[2])==3 or len(p[2])==9 or len(p[2])==6:
            isVector=True
            for i in p[2]:
                try:
                    float(i)
                except:
                    isVector=False
            if isVector:
                if len(p[2])==3:
                    p[0]=apply(Vector,p[2])
                elif len(p[2])==9:
                    p[0]=apply(Tensor,p[2])
                else:
                    p[0]=apply(SymmTensor,p[2])
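
    # Illustrative example (editor's addition): because of the coercion above,
    # purely numeric lists of length 3, 6 or 9 come back as Vector, SymmTensor
    # or Tensor instances rather than plain Python lists. A rough sketch, using
    # the FoamStringParser convenience class defined further down:
    #
    #     p=FoamStringParser("U (1 2 3); T (1 2 3 4 5 6);")
    #     p["U"]   # -> a Vector built from 1,2,3
    #     p["T"]   # -> a SymmTensor built from 1..6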
    def p_wlist(self,p):
        '''wlist : '(' wordlist ')' '''
        p[0] = p[2]

    def p_unparsed(self,p):
        '''unparsed : UNPARSEDCHUNK'''
        p[0] = Unparsed(p[1])

    def p_prelist_seen(self,p):
        '''prelist_seen : '''
        if self.listLengthUnparsed!=None:
            # print "Hepp"
            if int(p[-1])>=self.listLengthUnparsed:
                # print "Ho",p.lexer.lexpos,p.lexer.lexdata[p.lexer.lexpos-1:p.lexer.lexpos+2],p[1],len(p[1])
                p.lexer.begin('unparsed')
                p.lexer.level=0
                p.lexer.code_start = p.lexer.lexpos

                # t=p.lexer.token()
                ## print t.type
                ## return t
                # p[0] = None

    def p_prelist(self,p):
        '''prelist : integer prelist_seen '(' itemlist ')'
                   | integer prelist_seen '(' unparsed ')' '''
        if type(p[4])==Unparsed:
            p[0] = UnparsedList(int(p[1]),p[4].data)
        else:
            p[0] = p[4]

    def p_itemlist(self,p):
        '''itemlist : itemlist item
                    | item '''
        if len(p)==2:
            if p[1]==None:
                p[0]=[]
            else:
                p[0]=[ p[1] ]
        else:
            p[0]=p[1]
            p[0].append(p[2])

    def p_wordlist(self,p):
        '''wordlist : wordlist word
                    | word '''
        if len(p)==2:
            if p[1]==None:
                p[0]=[]
            else:
                p[0]=[ p[1] ]
        else:
            p[0]=p[1]
            p[0].append(p[2])

    def p_word(self,p):
        '''word : NAME
                | UNIFORM
                | NONUNIFORM
                | MERGE
                | OVERWRITE
                | DEFAULT
                | ERROR'''
        p[0]=p[1]

    def p_substitution(self,p):
        '''substitution : SUBSTITUTION'''
        if self.doMacros:
            nm=p[1][1:]
            p[0]="<Symbol '"+nm+"' not found>"
            if self.temp==None:
                return
            if nm in self.temp:
                p[0]=deepcopy(self.temp[nm])
        else:
            p[0]=p[1]

    def p_dictline(self,p):
        '''dictline : word dictitem ';'
                    | word list ';'
                    | word prelist ';'
                    | word fieldvalue ';'
                    | macro
                    | word dictionary'''
        if len(p)==4 and type(p[2])==list:
            # remove the prefix from long lists (if present)
            doAgain=True
            tmp=p[2]
            while doAgain:
                doAgain=False
                for i in range(len(tmp)-1):
                    if type(tmp[i])==int and type(tmp[i+1]) in [list]:
                        if tmp[i]==len(tmp[i+1]):
                            nix=tmp[:i]+tmp[i+1:]
                            for i in range(len(tmp)):
                                tmp.pop()
                            tmp.extend(nix)
                            doAgain=True
                            break
        if len(p)>=3:
            p[0] = ( p[1] , p[2] )
        else:
            p[0] = ( self.emptyCnt , p[1] )
            self.emptyCnt+=1

    def p_number(self,p):
        '''number : integer
                  | FCONST'''
        p[0] = p[1]

    def p_dimension(self,p):
        '''dimension : '[' number number number number number number number ']'
                     | '[' number number number number number ']' '''
        result=p[2:-1]
        if len(result)==5:
            result+=[0,0]

        p[0]=apply(Dimension,result)
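
    # Illustrative example (editor's addition): the dimension rule accepts both
    # the 7-entry and the 5-entry OpenFOAM form and pads the latter with two
    # zeros. A dimensioned entry such as
    #
    #     nu nu [0 2 -1 0 0 0 0] 1e-05;
    #
    # should therefore parse (via dictitem/longitem below) into a tuple whose
    # middle element is a Dimension instance; the exact value layout is a
    # sketch, not verified against every PyFoam release.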
    def p_vector(self,p):
        '''vector : '(' number number number ')' '''
        p[0]=apply(Vector,p[2:5])

    def p_tensor(self,p):
        '''tensor : '(' number number number number number number number number number ')' '''
        p[0]=apply(Tensor,p[2:11])

    def p_symmtensor(self,p):
        '''symmtensor : '(' number number number number number number ')' '''
        p[0]=apply(SymmTensor,p[2:8])

    def p_fieldvalue_uniform(self,p):
        '''fieldvalue : UNIFORM number
                      | UNIFORM vector
                      | UNIFORM tensor
                      | UNIFORM symmtensor'''
        p[0] = Field(p[2])

    def p_fieldvalue_nonuniform(self,p):
        '''fieldvalue : NONUNIFORM NAME list
                      | NONUNIFORM NAME prelist'''
        p[0] = Field(p[3],name=p[2])
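
    # Illustrative example (editor's addition): the two fieldvalue rules map
    # typical OpenFOAM field entries onto Field objects, roughly as follows
    # (a sketch of the intended behaviour):
    #
    #     internalField uniform (1 0 0);                    # -> Field(Vector(1,0,0))
    #     internalField nonuniform List<scalar> 2(1.0 2.0); # -> Field([1.0,2.0],name="List<scalar>")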
    def p_dictitem(self,p):
        '''dictitem : longitem
                    | pitem'''
        if type(p[1])==tuple:
            p[0]=TupleProxy(p[1])
        else:
            p[0] = p[1]

    def p_longitem(self,p):
        '''longitem : pitemlist pitem'''
        p[0] = p[1]+(p[2],)

    def p_pitemlist(self,p):
        '''pitemlist : pitemlist pitem
                     | pitem '''
        if len(p)==2:
            p[0]=(p[1],)
        else:
            ## print type(p[1][-1])
            ## if type(p[1][-1])==int and type(p[2])==tuple:
            ##     print "Hepp",p[2]
            p[0]=p[1]+(p[2],)

    def p_pitem(self,p):
        '''pitem : word
                 | SCONST
                 | number
                 | dictionary
                 | list
                 | dimension
                 | substitution
                 | empty'''
        p[0] = p[1]

    def p_item(self,p):
        '''item : pitem
                | REACTION
                | list
                | dictionary'''
        p[0] = p[1]

    def p_empty(self,p):
        'empty :'
        pass

    def p_error(self,p):
        raise PyFoamParserError("Syntax error at token", p) # .type, p.lineno
        # Just discard the token and tell the parser it's okay.
        # self.yacc.errok()

class PyFoamParserError:
    def __init__(self,descr,data=None):
        self.descr=descr
        self.data=data

    def __str__(self):
        result="Error in PyFoamParser: '"+self.descr+"'"
        if self.data!=None:
            val=self.data.value
            if len(val)>100:
                val=val[:40]+" .... "+val[-40:]

            result+=" @ %r (Type: %s ) in line %d at position %d" % (val,
                                                                     self.data.type,
                                                                     self.data.lineno,
                                                                     self.data.lexpos)

        return result

    def __repr__(self):
        return str(self)
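
# Illustrative sketch (editor's addition): since p_error raises
# PyFoamParserError, callers can trap malformed input. The class is a classic
# (old-style) class, which Python 2 still allows to be raised and caught:
#
#     try:
#         FoamStringParser("broken {")
#     except PyFoamParserError,e:
#         print "Parsing failed:",e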

class FoamStringParser(FoamFileParser):
    """Convenience class that parses only a headerless OpenFOAM dictionary"""

    def __init__(self,content,debug=False):
        """@param content: the string to be parsed
        @param debug: output debug information during parsing"""

        FoamFileParser.__init__(self,content,debug=debug,noHeader=True,boundaryDict=False)

    def __str__(self):
        return str(FoamFileGenerator(self.data))
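
# Illustrative usage sketch (editor's addition): parsing a headerless
# dictionary snippet from memory and regenerating it as a string:
#
#     p=FoamStringParser("writeControl timeStep; writeInterval 10;")
#     p["writeInterval"]            # -> 10
#     print str(p)                  # re-emitted via FoamFileGenerator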

class ParsedBoundaryDict(ParsedParameterFile):
    """Convenience class that parses only an OpenFOAM polyMesh-boundaries file"""

    def __init__(self,name,backup=False,debug=False):
        """@param name: The name of the parameter file
        @param backup: create a backup-copy of the file"""

        ParsedParameterFile.__init__(self,name,backup=backup,debug=debug,boundaryDict=True)

    def parse(self,content):
        """Constructs a representation of the file"""
        temp=ParsedParameterFile.parse(self,content)
        self.content={}
        for i in range(0,len(temp),2):
            self.content[temp[i]]=temp[i+1]
        return self.content

    def __str__(self):
        string="// File generated by PyFoam - sorry for the ugliness\n\n"
        temp=[]
        for k,v in self.content.iteritems():
            temp.append((k,v))

        temp.sort(lambda x,y:cmp(int(x[1]["startFace"]),int(y[1]["startFace"])))

        temp2=[]

        for b in temp:
            temp2.append(b[0])
            temp2.append(b[1])

        generator=FoamFileGenerator(temp2,header=self.header)
        string+=str(generator)

        return string
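
# Illustrative usage sketch (editor's addition): the boundary file of a case
# becomes a dictionary keyed by patch name; the path and the patch name
# "inlet" are assumptions about a typical case layout:
#
#     bnd=ParsedBoundaryDict("constant/polyMesh/boundary")
#     bnd["inlet"]["type"]          # e.g. "patch"
#     bnd["inlet"]["nFaces"]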

class ParsedFileHeader(ParsedParameterFile):
    """Only parse the header of a file"""

    def __init__(self,name):
        ParsedParameterFile.__init__(self,name,backup=False,noBody=True)

    def __getitem__(self,name):
        return self.header[name]

    def __contains__(self,name):
        return name in self.header

    def __len__(self):
        return len(self.header)
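
# Illustrative usage sketch (editor's addition): reading only the FoamFile
# header, e.g. to check the class of a field file without parsing its body;
# the path "0/U" is a hypothetical case file:
#
#     hdr=ParsedFileHeader("0/U")
#     hdr["class"]                  # e.g. "volVectorField"
#     "object" in hdr               # -> True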