"""Parameter file is read into memory and modified there"""

from FileBasis import FileBasisBackup
from PyFoam.Basics.PlyParser import PlyParser
from PyFoam.Basics.FoamFileGenerator import FoamFileGenerator

from PyFoam.Basics.DataStructures import Vector,Field,Dimension,DictProxy,TupleProxy,Tensor,SymmTensor,Unparsed,UnparsedList,Codestream

from PyFoam.Error import error,warning,FatalErrorPyFoamException

from os import path
from copy import deepcopy

class ParsedParameterFile(FileBasisBackup):
    """ Parameterfile whose complete representation is read into
    memory, can be manipulated and afterwards written to disk"""

    def __init__(self,
                 name,
                 backup=False,
                 debug=False,
                 boundaryDict=False,
                 listDict=False,
                 listDictWithHeader=False,
                 listLengthUnparsed=None,
                 noHeader=False,
                 binaryMode=False,
                 noBody=False,
                 doMacroExpansion=False,
                 dontRead=False,
                 createZipped=True):
        """@param name: The name of the parameter file
        @param backup: create a backup-copy of the file
        @param boundaryDict: the file to parse is a boundary file
        @param listDict: the file only contains a list
        @param listDictWithHeader: the file only contains a list and a header
        @param listLengthUnparsed: Lists longer than that length are not parsed
        @param binaryMode: Parse long lists in binary mode (to be overridden by
        the settings in the header)
        @param noHeader: don't expect a header
        @param noBody: don't read the body of the file (only the header)
        @param doMacroExpansion: expand #include and $var
        @param dontRead: Do not read the file during construction
        """

        self.noHeader=noHeader
        self.noBody=noBody
        FileBasisBackup.__init__(self,
                                 name,
                                 backup=backup,
                                 createZipped=createZipped)
        self.debug=debug
        self.boundaryDict=boundaryDict
        self.listDict=listDict
        self.listDictWithHeader=listDictWithHeader
        self.listLengthUnparsed=listLengthUnparsed
        self.doMacros=doMacroExpansion

        self.header=None
        self.content=None

        self.binaryMode=binaryMode

        if not dontRead:
            self.readFile()

    def parse(self,content):
        """Constructs a representation of the file"""
        parser=FoamFileParser(content,
                              debug=self.debug,
                              fName=self.name,
                              boundaryDict=self.boundaryDict,
                              listDict=self.listDict,
                              listDictWithHeader=self.listDictWithHeader,
                              listLengthUnparsed=self.listLengthUnparsed,
                              noHeader=self.noHeader,
                              noBody=self.noBody,
                              binaryMode=self.binaryMode,
                              doMacroExpansion=self.doMacros)

        self.content=parser.getData()
        self.header=parser.getHeader()
        return self.content

    def __contains__(self,key):
        return key in self.content

    def __getitem__(self,key):
        return self.content[key]

    def __setitem__(self,key,value):
        self.content[key]=value

    def __delitem__(self,key):
        del self.content[key]

    def __len__(self):
        return len(self.content)

    def __iter__(self):
        for key in self.content:
            yield key

    def __str__(self):
        """Generates a string from the contents in memory.
        Used to be called makeString"""

        string="// -*- C++ -*-\n// File generated by PyFoam - sorry for the ugliness\n\n"

        generator=FoamFileGenerator(self.content,header=self.header)
        string+=generator.makeString(firstLevel=True)

        return string

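# Usage sketch (illustrative only; the path is made up): read a dictionary,
# modify an entry and write it back.  writeFile() comes from the FileBasis
# base classes.
#
#   f=ParsedParameterFile("system/controlDict")
#   f["endTime"]=1000
#   f["writeControl"]="timeStep"
#   f.writeFile()
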
class WriteParameterFile(ParsedParameterFile):
    """A specialization that is used to only write to the file"""

    def __init__(self,
                 name,
                 backup=False,
                 className="dictionary",
                 objectName=None,
                 createZipped=False):
        ParsedParameterFile.__init__(self,
                                     name,
                                     backup=backup,
                                     dontRead=True,
                                     createZipped=createZipped)

        if objectName==None:
            objectName=path.basename(name)

        self.content={}
        self.header={"version":"2.0",
                     "format":"ascii",
                     "class":className,
                     "object":objectName}

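# Usage sketch (illustrative; path and entries are made up): build a new
# dictionary file from scratch and write it out.
#
#   w=WriteParameterFile("system/fvSolution.generated")
#   w["solvers"]={"p":{"solver":"PCG","preconditioner":"DIC","tolerance":1e-06}}
#   w.writeFile()
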
class Enumerate(object):
    def __init__(self, names):
        for number, name in enumerate(names):
            setattr(self, name, number)

inputModes=Enumerate(["merge","error","warn","protect","overwrite","default"])

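# Enumerate simply numbers the given names in order, so the input modes can
# be compared as small integers (sketch):
#
#   inputModes.merge     # == 0
#   inputModes.overwrite # == 4
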
class FoamFileParser(PlyParser):
    """Class that parses a string that contains the contents of an
    OpenFOAM-file and builds a nested structure of dictionaries and
    lists from it"""

    def __init__(self,
                 content,
                 fName=None,
                 debug=False,
                 noHeader=False,
                 noBody=False,
                 doMacroExpansion=False,
                 boundaryDict=False,
                 preserveComments=True,
                 preserveNewlines=True,
                 listDict=False,
                 listDictWithHeader=False,
                 listLengthUnparsed=None,
                 binaryMode=False,
                 duplicateCheck=False,
                 duplicateFail=True):
        """@param content: the string to be parsed
        @param fName: Name of the actual file (if any)
        @param debug: output debug information during parsing
        @param noHeader: switch that turns off the parsing of the header
        @param duplicateCheck: Check for duplicates in dictionaries
        @param duplicateFail: Fail if a duplicate is discovered"""

        self.binaryMode=binaryMode
        self.fName=fName
        self.data=None
        self.header=None
        self.debug=debug
        self.listLengthUnparsed=listLengthUnparsed
        self.doMacros=doMacroExpansion
        self.preserveComments=preserveComments
        self.preserveNewLines=preserveNewlines
        self.duplicateCheck=duplicateCheck
        self.duplicateFail=duplicateFail

        self.collectDecorations=False
        self.inputMode=inputModes.merge

        self._decorationBuffer=""

        startCnt=0

        self.dictStack=[DictProxy()]

        if noBody:
            self.start='noBody'
            startCnt+=1

        if noHeader:
            self.start='noHeader'
            startCnt+=1

        if listDict:
            self.start='pureList'
            startCnt+=1
            self.dictStack=[]

        if listDictWithHeader:
            self.start='pureListWithHeader'
            startCnt+=1

        if boundaryDict:
            self.start='boundaryDict'
            startCnt+=1

        if startCnt>1:
            error("Only one start symbol can be specified.",startCnt,"are specified")

        PlyParser.__init__(self,debug=debug)

        self.emptyCnt=0

        self.header,self.data=self.parse(content)

    def __contains__(self,key):
        return key in self.data

    def __getitem__(self,key):
        return self.data[key]

    def __setitem__(self,key,value):
        self.data[key]=value

    def __iter__(self):
        for key in self.data:
            yield key

    def resetDecoration(self):
        self._decorationBuffer=""

        if self.collectDecorations:
            self._decorationBuffer+=text

    def getDecoration(self):
        tmp=self._decorationBuffer
        self.resetDecoration()
        if len(tmp)>0:
            if tmp[-1]=='\n':
                tmp=tmp[:-1]
        return tmp

    def directory(self):
        if self.fName==None:
            return path.curdir
        else:
            return path.dirname(self.fName)

    def getData(self):
        """ Get the data structure"""
        return self.data

    def getHeader(self):
        """ Get the OpenFOAM-header"""
        return self.header

    def printContext(self,c,ind):
        """Prints the context of the current index"""
        print "------"
        print c[max(0,ind-100):max(0,ind-1)]
        print "------"
        print ">",c[ind-1],"<"
        print "------"
        print c[min(len(c),ind):min(len(c),ind+100)]
        print "------"

        """Prints the error message of the parser and exits"""
        print "PARSER ERROR:",text
        print "On index",ind
        self.printContext(c,ind)
        raise PyFoamParserError("Unspecified")

    def condenseAllPreFixLists(self,orig):
        """Checks whether this list is a list that consists only of prefix-lists"""
        isAllPreList=False
        if (len(orig) % 2)==0:
            isAllPreList=True
            for i in range(0,len(orig),2):
                if type(orig[i])==int and (type(orig[i+1]) in [list,Vector,Tensor,SymmTensor]):
                    if len(orig[i+1])!=orig[i]:
                        isAllPreList=False
                        break
                else:
                    isAllPreList=False
                    break

        if isAllPreList:
            return orig[1::2]
        else:
            return orig

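    # Example of the condensation done above (sketch): a list alternating
    # between a length and a sub-list of exactly that length, such as
    # [3, [1, 2, 3], 2, [4, 5]], is reduced to the sub-lists only,
    # [[1, 2, 3], [4, 5]]; any other list is returned unchanged.
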
    tokens = (
        'NAME',
        'ICONST',
        'FCONST',
        'SCONST',
        'FOAMFILE',
        'UNIFORM',
        'NONUNIFORM',
        'UNPARSEDCHUNK',
        'CODESTREAMCHUNK',
        'REACTION',
        'SUBSTITUTION',
        'MERGE',
        'OVERWRITE',
        'ERROR',
        'WARN',
        'PROTECT',
        'DEFAULT',
        'INCLUDE',
        'INCLUDEIFPRESENT',
        'REMOVE',
        'INPUTMODE',
        'KANALGITTER',
        'CODESTART',
        'CODEEND',
    )

    reserved = {
        'FoamFile'        : 'FOAMFILE',
        'uniform'         : 'UNIFORM',
        'nonuniform'      : 'NONUNIFORM',
        'include'         : 'INCLUDE',
        'includeIfPresent': 'INCLUDEIFPRESENT',
        'remove'          : 'REMOVE',
        'inputMode'       : 'INPUTMODE',
        'merge'           : 'MERGE',
        'overwrite'       : 'OVERWRITE',
        'error'           : 'ERROR',
        'warn'            : 'WARN',
        'protect'         : 'PROTECT',
        'default'         : 'DEFAULT',
    }

    states = (
        ('unparsed', 'exclusive'),
        ('codestream', 'exclusive'),
    )

        r'\('
        t.lexer.level+=1

        r'\)'
        t.lexer.level-=1

        if t.lexer.level < 0 :
            t.value = t.lexer.lexdata[t.lexer.code_start:t.lexer.lexpos-1]

            t.lexer.lexpos-=1
            t.type = "UNPARSEDCHUNK"
            t.lexer.lineno += t.value.count('\n')
            t.lexer.begin('INITIAL')
            return t

    t_unparsed_ignore = ' \t\n0123456789.-+e'

    def t_unparsed_error(self,t):
        print "Error",t.lexer.lexdata[t.lexer.lexpos]
        t.lexer.skip(1)

        r"\#\}"
        t.value = t.lexer.lexdata[t.lexer.code_start:t.lexer.lexpos-2]
        t.lexer.lexpos-=2
        t.type = "CODESTREAMCHUNK"
        t.lexer.lineno += t.value.count('\n')
        t.lexer.begin('INITIAL')
        return t

    t_codestream_ignore = ''

    def t_codestream_error(self,t):
        print "Error",t.lexer.lexdata[t.lexer.lexpos]
        t.lexer.skip(1)

        r'\$[a-zA-Z_][+\-<>(),.\*|a-zA-Z_0-9&%:]*'
        t.type=self.reserved.get(t.value,'SUBSTITUTION')
        if t.value[-1]==")":
            if t.value.count(")")>t.value.count("("):

                t.value=t.value[:-1]
                t.lexer.lexpos-=1

        return t

    t_CODESTART = r'\#\{'

    t_CODEEND = r'\#\}'

    t_KANALGITTER = r'\#'

    t_ICONST = r'(-|)\d+([uU]|[lL]|[uU][lL]|[lL][uU])?'

    t_FCONST = r'(-|)((\d+)(\.\d*)(e(\+|-)?(\d+))? | (\d+)e(\+|-)?(\d+))([lL]|[fF])?'

    t_SCONST = r'\"([^\\\n]|(\\.))*?\"'

    literals = "(){};[]"

    t_ignore=" \t\r"

        r'\n+'
        t.lexer.lineno += len(t.value)
        now=t.lexer.lexpos
        next=t.lexer.lexdata.find('\n',now)
        if next>=0:
            line=t.lexer.lexdata[now:next]
            pos=line.find("=")
            if pos>=0:
                if ((line.find("//")>=0 and line.find("//")<pos)) or (line.find("/*")>=0 and line.find("/*")<pos) or (line.find('"')>=0 and line.find('"')<pos):
                    return
                t.value = line
                t.type = "REACTION"
                t.lexer.lineno += 1
                t.lexer.lexpos = next
                return t

    def p_global(self,p):
        'global : header dictbody'
        p[0] = ( p[1] , p[2] )

    def p_gotHeader(self,p):
        'gotHeader :'
        p.lexer.lexpos=len(p.lexer.lexdata)

    def p_noBody(self,p):
        ''' noBody : FOAMFILE '{' dictbody gotHeader '}' '''
        p[0] = ( p[3] , {} )

    def p_noHeader(self,p):
        'noHeader : dictbody'
        p[0] = ( None , p[1] )

    def p_pureList(self,p):
        'pureList : list'
        p[0] = ( None , p[1] )

    def p_pureListWithHeader(self,p):
        '''pureListWithHeader : header list
                              | header prelist '''
        p[0] = ( p[1] , p[2] )

    def p_boundaryDict(self,p):
        '''boundaryDict : header list
                        | header prelist '''

        p[0] = ( p[1] , p[2] )

    def p_header(self,p):
        'header : FOAMFILE dictionary'
        p[0] = p[2]
        if p[0]["format"]=="binary":
            self.binaryMode=True
            raise FatalErrorPyFoamException("Can not parse binary files. It is not implemented")
        elif p[0]["format"]=="ascii":
            self.binaryMode=False
        else:
            raise FatalErrorPyFoamException("Don't know how to parse file format",p[0]["format"])

    def p_macro(self,p):
        '''macro : KANALGITTER include
                 | KANALGITTER inputMode
                 | KANALGITTER remove'''
        p[0] = p[1]+p[2]+"\n"
        if self.doMacros:
            p[0]="// "+p[0]

    def p_include(self,p):
        '''include : INCLUDE SCONST
                   | INCLUDEIFPRESENT SCONST'''
        if self.doMacros:
            fName=path.join(self.directory(),p[2][1:-1])
            read=True
            if p[1]=="includeIfPresent" and not path.exists(fName):
                read=False
            if read and not path.exists(fName):
                raise PyFoamParserError("The included file "+fName+" does not exist")
            if read:
                data=ParsedParameterFile(fName,noHeader=True)
                into=self.dictStack[-1]
                for k in data:
                    into[k]=data[k]

        p[0] = p[1] + " " + p[2]

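    # Example input handled by the macro rules above (sketch; the file name
    # "initialConditions" is made up):
    #
    #   #include "initialConditions"
    #   #inputMode merge
    #   pressure $initialPressure;
    #
    # With doMacroExpansion=True the included file is merged into the current
    # dictionary, the expanded directive is written back as a comment and
    # $initialPressure is replaced by a deep copy of the value found in the
    # top-level dictionary; without it the directive text is kept verbatim.
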
    def p_remove(self,p):
        '''remove : REMOVE word
                  | REMOVE wlist'''
        p[0] = p[1] + " "
        if type(p[2])==str:
            p[0]+=p[2]
        else:
            p[0]+="( "
            for w in p[2]:
                p[0]+=w+" "
            p[0]+=")"

    def p_integer(self,p):
        '''integer : ICONST'''
        p[0] = int(p[1])

        '''integer : FCONST'''
        p[0] = float(p[1])

    def p_enter_dict(self,p):
        '''enter_dict :'''
        self.dictStack.append(DictProxy())

    def p_exit_dict(self,p):
        '''exit_dict :'''
        p[0]=self.dictStack.pop()

    def p_dictionary(self,p):
        '''dictionary : '{' enter_dict dictbody '}' exit_dict
                      | '{' '}' '''
        if len(p)==6:
            p[0] = p[5]
        else:
            p[0] = DictProxy()

    def p_dictbody(self,p):
        '''dictbody : dictbody dictline
                    | dictline
                    | empty'''

        if len(p)==3:
            p[0]=p[1]
            if self.duplicateCheck:
                if p[2][0] in p[0]:
                    if self.duplicateFail:
                        error("Key",p[2][0],"already defined")
                    else:
                        warning("Key",p[2][0],"already defined")
            p[0][p[2][0]]=p[2][1]
            p[0].addDecoration(p[2][0],self.getDecoration())
        else:
            p[0]=self.dictStack[-1]

            if p[1]:
                p[0][p[1][0]]=p[1][1]

    def p_wlist(self,p):
        '''wlist : '(' wordlist ')' '''
        p[0] = p[2]

    def p_unparsed(self,p):
        '''unparsed : UNPARSEDCHUNK'''
        p[0] = Unparsed(p[1])

    def p_prelist_seen(self,p):
        '''prelist_seen : '''
        if self.listLengthUnparsed!=None:
            if int(p[-1])>=self.listLengthUnparsed:
                p.lexer.begin('unparsed')
                p.lexer.level=0
                p.lexer.code_start = p.lexer.lexpos

    def p_codestream(self,p):
        '''codestream : codeSeen CODESTART CODESTREAMCHUNK CODEEND '''
        p[0] = Codestream(p[3])

    def p_codeSeen(self,p):
        '''codeSeen : '''
        p.lexer.begin('codestream')
        p.lexer.level=0
        p.lexer.code_start = p.lexer.lexpos

    def p_prelist(self,p):
        '''prelist : integer prelist_seen '(' itemlist ')'
                   | integer prelist_seen '(' unparsed ')' '''
        if type(p[4])==Unparsed:
            p[0] = UnparsedList(int(p[1]),p[4].data)
        else:
            p[0] = self.condenseAllPreFixLists(p[4])

    def p_itemlist(self,p):
        '''itemlist : itemlist item
                    | item '''
        if len(p)==2:
            if p[1]==None:
                p[0]=[]
            else:
                p[0]=[ p[1] ]
        else:
            p[0]=p[1]
            p[0].append(p[2])

    def p_wordlist(self,p):
        '''wordlist : wordlist word
                    | word '''
        if len(p)==2:
            if p[1]==None:
                p[0]=[]
            else:
                p[0]=[ p[1] ]
        else:
            p[0]=p[1]
            p[0].append(p[2])

    def p_word(self,p):
        '''word : NAME
                | UNIFORM
                | NONUNIFORM
                | MERGE
                | OVERWRITE
                | DEFAULT
                | WARN
                | PROTECT
                | ERROR'''
        p[0]=p[1]

    def p_substitution(self,p):
        '''substitution : SUBSTITUTION'''
        if self.doMacros:
            nm=p[1][1:]
            p[0]="<Symbol '"+nm+"' not found>"
            if nm in self.dictStack[0]:
                p[0]=deepcopy(self.dictStack[0][nm])
        else:
            p[0]=p[1]

    def p_dictkey(self,p):
        '''dictkey : word
                   | SCONST'''
        p[0]=p[1]

    def p_dictline(self,p):
        '''dictline : dictkey dictitem ';'
                    | dictkey list ';'
                    | dictkey prelist ';'
                    | dictkey fieldvalue ';'
                    | macro
                    | substitution ';'
                    | dictkey codestream ';'
                    | dictkey dictionary'''
        if len(p)==4 and type(p[2])==list:

            doAgain=True
            tmp=p[2]
            while doAgain:
                doAgain=False
                for i in range(len(tmp)-1):
                    if type(tmp[i])==int and type(tmp[i+1]) in [list]:
                        if tmp[i]==len(tmp[i+1]):
                            nix=tmp[:i]+tmp[i+1:]
                            for i in range(len(tmp)):
                                tmp.pop()
                            tmp.extend(nix)
                            doAgain=True
                            break
        if len(p)==4:
            p[0] = ( p[1] , p[2] )
        elif len(p)==3:
            if p[2]==';':
                p[0]= (p[1],'')
            else:
                p[0] = ( p[1] , p[2] )
        else:
            p[0] = ( self.emptyCnt , p[1] )
            self.emptyCnt+=1

    def p_number(self,p):
        '''number : integer
                  | FCONST'''
        p[0] = p[1]

    def p_dimension(self,p):
        '''dimension : '[' number number number number number number number ']'
                     | '[' number number number number number ']' '''
        result=p[2:-1]
        if len(result)==5:
            result+=[0,0]

        p[0]=apply(Dimension,result)

    def p_vector(self,p):
        '''vector : '(' number number number ')' '''
        p[0]=apply(Vector,p[2:5])

    def p_tensor(self,p):
        '''tensor : '(' number number number number number number number number number ')' '''
        p[0]=apply(Tensor,p[2:11])

    def p_symmtensor(self,p):
        '''symmtensor : '(' number number number number number number ')' '''
        p[0]=apply(SymmTensor,p[2:8])

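    # The rules above turn OpenFOAM literals into PyFoam data structures
    # (sketch): '[0 2 -1 0 0 0 0]' becomes a Dimension, '(1 2 3)' a Vector,
    # a 9-component '( ... )' a Tensor and a 6-component '( ... )' a SymmTensor.
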
    def p_dictitem(self,p):
        '''dictitem : longitem
                    | pitem'''
        if type(p[1])==tuple:
            if len(p[1])==2 and p[1][0]=="uniform":
                p[0]=Field(p[1][1])
            elif len(p[1])==3 and p[1][0]=="nonuniform":
                p[0]=Field(p[1][2],name=p[1][1])
            else:
                p[0]=TupleProxy(p[1])
        else:
            p[0] = p[1]

    def p_longitem(self,p):
        '''longitem : pitemlist pitem'''
        p[0] = p[1]+(p[2],)

    def p_pitemlist(self,p):
        '''pitemlist : pitemlist pitem
                     | pitem '''
        if len(p)==2:
            p[0]=(p[1],)
        else:

            p[0]=p[1]+(p[2],)

    def p_pitem(self,p):
        '''pitem : word
                 | SCONST
                 | number
                 | dictionary
                 | list
                 | dimension
                 | substitution
                 | empty'''
        p[0] = p[1]

    def p_item(self,p):
        '''item : pitem
                | REACTION
                | list
                | dictionary'''
        p[0] = p[1]

class PyFoamParserError(FatalErrorPyFoamException):
    def __str__(self):
        result="Error in PyFoamParser: '"+self.descr+"'"
        if self.data!=None:
            val=self.data.value
            if len(val)>100:
                val=val[:40]+" .... "+val[-40:]

            result+=" @ %r (Type: %s ) in line %d at position %d" % (val,
                                                                     self.data.type,
                                                                     self.data.lineno,
                                                                     self.data.lexpos)

        return result

class FoamStringParser(FoamFileParser):
    """Convenience class that parses only a headerless OpenFOAM dictionary"""

    def __init__(self,
                 content,
                 debug=False,
                 duplicateCheck=False,
                 duplicateFail=False):
        """@param content: the string to be parsed
        @param debug: output debug information during parsing"""

        FoamFileParser.__init__(self,
                                content,
                                debug=debug,
                                noHeader=True,
                                boundaryDict=False,
                                duplicateCheck=duplicateCheck,
                                duplicateFail=duplicateFail)

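    # Usage sketch (illustrative): parse a dictionary given as a plain string
    # and use the mapping interface inherited from FoamFileParser.
    #
    #   p=FoamStringParser("a 1; b (1 2 3);")
    #   p["a"]          # -> 1
    #   p.getData()     # -> the complete dictionary
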
class ParsedBoundaryDict(ParsedParameterFile):
    """Convenience class that parses only an OpenFOAM polyMesh-boundaries file"""

    def __init__(self,name,backup=False,debug=False):
        ParsedParameterFile.__init__(self,
                                     name,
                                     backup=backup,
                                     debug=debug,
                                     boundaryDict=True)

    def parse(self,content):
        """Constructs a representation of the file"""
        temp=ParsedParameterFile.parse(self,content)
        self.content={}
        for i in range(0,len(temp),2):
            self.content[temp[i]]=temp[i+1]
        return self.content

    def __str__(self):
        string="// File generated by PyFoam - sorry for the ugliness\n\n"
        temp=[]
        for k,v in self.content.iteritems():
            temp.append((k,v))

        temp.sort(lambda x,y:cmp(int(x[1]["startFace"]),int(y[1]["startFace"])))

        temp2=[]

        for b in temp:
            temp2.append(b[0])
            temp2.append(b[1])

        generator=FoamFileGenerator(temp2,header=self.header)
        string+=str(generator)

        return string

class ParsedFileHeader(ParsedParameterFile):
    """Only parse the header of a file"""

    def __init__(self,name):
        ParsedParameterFile.__init__(self,name,noBody=True)

    def __getitem__(self,name):
        return self.header[name]

    def __contains__(self,name):
        return name in self.header

    def __len__(self):
        return len(self.header)
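
# Usage sketch (illustrative; the file name is made up): inspect a header
# without reading the body of the file.
#
#   head=ParsedFileHeader("0/p")
#   head["class"]          # e.g. "volScalarField"
#   "object" in head       # True if the header has an 'object' entry
#   len(head)              # number of header entries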