"""
Application class that implements pyFoamCaseReport.py
"""
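
# Typical usage (a sketch; it assumes the pyFoamCaseReport.py wrapper script
# that instantiates this class is on the PATH, and uses placeholder file and
# case names):
#
#   pyFoamCaseReport.py --full-report --file=report.rst someCase
#   rst2html report.rst report.html    # post-process the ReST output with docutils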

import sys,string
from optparse import OptionGroup

from fnmatch import fnmatch

from PyFoamApplication import PyFoamApplication
from PyFoam.RunDictionary.SolutionDirectory import SolutionDirectory
from PyFoam.RunDictionary.BoundaryDict import BoundaryDict
from PyFoam.RunDictionary.MeshInformation import MeshInformation
from PyFoam.RunDictionary.ParsedParameterFile import PyFoamParserError,ParsedBoundaryDict,ParsedParameterFile
from PyFoam.Basics.RestructuredTextHelper import RestructuredTextHelper
from PyFoam.Basics.DataStructures import DictProxy

from PyFoam.Error import error,warning

from math import log10,ceil
from os import path

class CaseReport(PyFoamApplication):
    def __init__(self,args=None):
        description="""
Produces human-readable reports about a case. Attention: the amount of
information in the reports is limited. The truth is always in the
dictionary files.

The format of the output is restructured text, so it can be run through a
postprocessor like rst2tex or rst2html to produce PDF or HTML respectively.
"""

        PyFoamApplication.__init__(self,
                                   args=args,
                                   description=description,
                                   usage="%prog [options] <casedir>",
                                   nr=1,
                                   changeVersion=False,
                                   interspersed=True)

    def addOptions(self):
        report=OptionGroup(self.parser,
                           "Reports",
                           "What kind of reports should be produced")
        self.parser.add_option_group(report)
        select=OptionGroup(self.parser,
                           "Selection",
                           "Which data should be used for the reports")
        self.parser.add_option_group(select)
        internal=OptionGroup(self.parser,
                             "Internal",
                             "Details of the parser")
        self.parser.add_option_group(internal)

        format=OptionGroup(self.parser,
                           "Formatting",
                           "Restructured Text formatting")
        self.parser.add_option_group(format)

        format.add_option("--heading-level",
                          action="store",
                          type="int",
                          default=2,
                          dest="headingLevel",
                          help="Default level of the headings. Valid values from 0 to 5. Default: %default")

        output=OptionGroup(self.parser,
                           "Output",
                           "How output should be generated")
        self.parser.add_option_group(output)

        output.add_option("--file",
                          action="store",
                          default=None,
                          dest="file",
                          help="Write the output to a file instead of the console")

        report.add_option("--full-report",
                          action="store_true",
                          default=False,
                          dest="all",
                          help="Print all available reports at once")

        report.add_option("--short-bc-report",
                          action="store_true",
                          default=False,
                          dest="shortBCreport",
                          help="Gives a short overview of the boundary conditions in the case")

        report.add_option("--long-bc-report",
                          action="store_true",
                          default=False,
                          dest="longBCreport",
                          help="Gives a full overview of the boundary conditions in the case")

        report.add_option("--dimensions",
                          action="store_true",
                          default=False,
                          dest="dimensions",
                          help="Show the dimensions of the fields")

        report.add_option("--internal-field",
                          action="store_true",
                          default=False,
                          dest="internal",
                          help="Show the internal value of the fields (the initial conditions)")

        report.add_option("--linear-solvers",
                          action="store_true",
                          default=False,
                          dest="linearSolvers",
                          help="Print the linear solvers and their tolerances")

        report.add_option("--relaxation-factors",
                          action="store_true",
                          default=False,
                          dest="relaxationFactors",
                          help="Print the relaxation factors (if there are any)")

        select.add_option("--time",
                          action="store",
                          type="float",
                          default=None,
                          dest="time",
                          help="Time to use as the basis for the reports")

        select.add_option("--region",
                          dest="region",
                          default=None,
                          help="Do the report for a specific region of a multi-region case")

        select.add_option("--all-regions",
                          dest="allRegions",
                          action="store_true",
                          default=False,
                          help="Do the report for all regions of a multi-region case")

        select.add_option("--parallel",
                          action="store_true",
                          default=False,
                          dest="parallel",
                          help="Get times from the processor-directories")

        internal.add_option("--long-field-threshold",
                            action="store",
                            type="int",
                            default=100,
                            dest="longlist",
                            help="Fields that are longer than this won't be parsed, but read into memory (and compared as strings). Default: %default")
        internal.add_option("--no-do-macro-expansion",
                            action="store_false",
                            default=True,
                            dest="doMacros",
                            help="Don't expand macros with $ and # in the field-files")

        select.add_option("--patches",
                          action="append",
                          default=None,
                          dest="patches",
                          help="Patches which should be processed (pattern, can be used more than once)")

        select.add_option("--exclude-patches",
                          action="append",
                          default=None,
                          dest="expatches",
                          help="Patches which should not be processed (pattern, can be used more than once)")

        report.add_option("--processor-matrix",
                          action="store_true",
                          default=False,
                          dest="processorMatrix",
                          help="Print a matrix of how many faces each processor shares with the other processors")

        report.add_option("--case-size",
                          action="store_true",
                          default=False,
                          dest="caseSize",
                          help="Report the number of cells, points and faces in the case")

        report.add_option("--decomposition",
                          action="store_true",
                          default=False,
                          dest="decomposition",
                          help="Report the size of the parallel decomposition")

    def run(self):
        if self.opts.file:
            sys.stdout=open(self.opts.file,"w")

        if self.opts.allRegions:
            sol=SolutionDirectory(self.parser.getArgs()[0],
                                  archive=None,
                                  parallel=self.opts.parallel,
                                  paraviewLink=False)
            for r in sol.getRegions():
                self.doRegion(r)
        else:
            self.doRegion(self.opts.region)

    def doRegion(self,theRegion):
        ReST=RestructuredTextHelper(defaultHeading=self.opts.headingLevel)

        if self.opts.allRegions:
            print ReST.buildHeading("Region: ",theRegion,level=self.opts.headingLevel-1)

        sol=SolutionDirectory(self.parser.getArgs()[0],
                              archive=None,
                              parallel=self.opts.parallel,
                              paraviewLink=False,
                              region=theRegion)

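        # --full-report is just a shorthand that switches on all of the
        # individual reports below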
        if self.opts.all:
            self.opts.caseSize=True
            self.opts.shortBCreport=True
            self.opts.longBCreport=True
            self.opts.dimensions=True
            self.opts.internal=True
            self.opts.linearSolvers=True
            self.opts.relaxationFactors=True
            self.opts.processorMatrix=True
            self.opts.decomposition=True

        if self.opts.time:
            try:
                self.opts.time=sol.timeName(sol.timeIndex(self.opts.time,minTime=True))
            except IndexError:
                error("The specified time",self.opts.time,"doesn't exist in the case")
            print "Using time t="+self.opts.time+"\n"

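        # Work out which prerequisites the selected reports have: some need
        # the polyMesh boundary file, some need the fields of the report time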
        needsPolyBoundaries=False
        needsInitialTime=False

        if self.opts.longBCreport:
            needsPolyBoundaries=True
            needsInitialTime=True
        if self.opts.shortBCreport:
            needsPolyBoundaries=True
            needsInitialTime=True
        if self.opts.dimensions:
            needsInitialTime=True
        if self.opts.internal:
            needsInitialTime=True
        if self.opts.decomposition:
            needsPolyBoundaries=True

        defaultProc=None
        if self.opts.parallel:
            defaultProc=0

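        # Read the patch list from the polyMesh boundary file (from the first
        # processor directory if --parallel is given) and filter it with the
        # --patches and --exclude-patches fnmatch patterns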
        if needsPolyBoundaries:
            proc=None
            boundary=BoundaryDict(sol.name,
                                  region=theRegion,
                                  time=self.opts.time,
                                  processor=defaultProc)

            boundMaxLen=0
            boundaryNames=[]
            for b in boundary:
                if b.find("procBoundary")!=0:
                    boundaryNames.append(b)
            if self.opts.patches!=None:
                tmp=boundaryNames
                boundaryNames=[]
                for b in tmp:
                    for p in self.opts.patches:
                        if fnmatch(b,p):
                            boundaryNames.append(b)
                            break

            if self.opts.expatches!=None:
                tmp=boundaryNames
                boundaryNames=[]
                for b in tmp:
                    keep=True
                    for p in self.opts.expatches:
                        if fnmatch(b,p):
                            keep=False
                            break
                    if keep:
                        boundaryNames.append(b)

            for b in boundaryNames:
                boundMaxLen=max(boundMaxLen,len(b))
            boundaryNames.sort()

        if self.opts.time==None:
            procTime="constant"
        else:
            procTime=self.opts.time

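        # Parse all field files of the selected time (the first time step if
        # no --time was given); fields that cannot be parsed are skipped with
        # a warning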
        if needsInitialTime:
            fields={}

            if self.opts.time==None:
                try:
                    time=sol.timeName(0)
                except IndexError:
                    error("There is no timestep in the case")
            else:
                time=self.opts.time

            tDir=sol[time]

            nameMaxLen=0

            for f in tDir:
                try:
                    fields[f.baseName()]=f.getContent(listLengthUnparsed=self.opts.longlist,
                                                      doMacroExpansion=self.opts.doMacros)
                    nameMaxLen=max(nameMaxLen,len(f.baseName()))
                except PyFoamParserError,e:
                    warning("Couldn't parse",f.name,"because of an error:",e," -> skipping")

            fieldNames=fields.keys()
            fieldNames.sort()

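        # Report the global mesh size; for decomposed cases the counts are
        # accumulated over all processor meshes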
        if self.opts.caseSize:
            print ReST.heading("Size of the case")

            nFaces=0
            nPoints=0
            nCells=0
            if self.opts.parallel:
                procs=range(sol.nrProcs())
                print "Accumulated from",sol.nrProcs(),"processors"
            else:
                procs=[None]

            for p in procs:
                info=MeshInformation(sol.name,
                                     processor=p,
                                     region=theRegion,
                                     time=self.opts.time)
                nFaces+=info.nrOfFaces()
                nPoints+=info.nrOfPoints()
                try:
                    nCells+=info.nrOfCells()
                except:
                    nCells="Not available"
            tab=ReST.table()
            tab[0]=("Faces",nFaces)
            tab[1]=("Points",nPoints)
            tab[2]=("Cells",nCells)
            print tab

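        # Report the mesh size per processor plus the number of faces each
        # patch contributes on every processor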
        if self.opts.decomposition:
            print ReST.heading("Decomposition")

            if sol.nrProcs()<2:
                print "This case is not decomposed"
            else:
                print "Case is decomposed for",sol.nrProcs(),"processors"
                print

                nCells=[]
                nFaces=[]
                nPoints=[]
                for p in sol.processorDirs():
                    info=MeshInformation(sol.name,
                                         processor=p,
                                         region=theRegion,
                                         time=self.opts.time)
                    nPoints.append(info.nrOfPoints())
                    nFaces.append(info.nrOfFaces())
                    nCells.append(info.nrOfCells())

                digits=int(ceil(log10(max(sol.nrProcs(),
                                          max(nCells),
                                          max(nFaces),
                                          max(nPoints)
                                          ))))+2
                nameLen=max(len("Points"),boundMaxLen)

                tab=ReST.table()
                tab[0]=["CPU"]+range(sol.nrProcs())

                tab.addLine()

                tab[1]=["Points"]+nPoints
                tab[2]=["Faces"]+nFaces
                tab[3]=["Cells"]+nCells
                tab.addLine(head=True)

                nr=3
                for b in boundaryNames:
                    nr+=1
                    tab[(nr,0)]=b
                    for i,p in enumerate(sol.processorDirs()):
                        try:
                            nFaces= ParsedBoundaryDict(sol.boundaryDict(processor=p,
                                                                        region=theRegion,
                                                                        time=self.opts.time)
                                                       )[b]["nFaces"]
                        except IOError:
                            nFaces= ParsedBoundaryDict(sol.boundaryDict(processor=p,
                                                                        region=theRegion)
                                                       )[b]["nFaces"]
                        except KeyError:
                            nFaces=0

                        tab[(nr,i+1)]=nFaces

                print tab

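        # Long report: one section per patch with a table that has one row
        # per field and one column per keyword of that boundary condition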
        if self.opts.longBCreport:
            print ReST.heading("The boundary conditions for t =",time)

            for b in boundaryNames:
                print ReST.buildHeading("Boundary: ",b,level=self.opts.headingLevel+1)
                bound=boundary[b]
                print ":Type:\t",bound["type"]
                if "physicalType" in bound:
                    print ":Physical:\t",bound["physicalType"]
                print ":Faces:\t",bound["nFaces"]
                print
                heads=["Field","type"]
                tab=ReST.table()
                tab[0]=heads
                tab.addLine(head=True)
                for row,fName in enumerate(fieldNames):
                    tab[(row+1,0)]=fName
                    f=fields[fName]
                    if "boundaryField" not in f:
                        tab[(row+1,1)]="Not a field file"
                    elif b not in f["boundaryField"]:
                        tab[(row+1,1)]="MISSING !!!"
                    else:
                        bf=f["boundaryField"][b]

                        for k in bf:
                            try:
                                col=heads.index(k)
                            except ValueError:
                                col=len(heads)
                                tab[(0,col)]=k
                                heads.append(k)
                            cont=str(bf[k])
                            if cont.find("\n")>=0:
                                tab[(row+1,col)]=cont[:cont.find("\n")]+"..."
                            else:
                                tab[(row+1,col)]=cont
                print tab

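        # Short report: one column per patch, one row per field, each cell
        # holding the boundary-condition type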
        if self.opts.shortBCreport:
            print ReST.heading("Table of boundary conditions for t =",time)

            types={}
            hasPhysical=False
            for b in boundary:
                if "physicalType" in boundary[b]:
                    hasPhysical=True

                types[b]={}

                for fName in fields:
                    f=fields[fName]
                    try:
                        if b not in f["boundaryField"]:
                            types[b][fName]="MISSING"
                        else:
                            types[b][fName]=f["boundaryField"][b]["type"]
                    except KeyError:
                        types[b][fName]="Not a field"

            tab=ReST.table()
            tab[0]=[""]+boundaryNames
            tab.addLine()
            tab[(1,0)]="Patch Type"
            for i,b in enumerate(boundaryNames):
                tab[(1,i+1)]=boundary[b]["type"]

            nr=2
            if hasPhysical:
                tab[(nr,0)]="Physical Type"
                for i,b in enumerate(boundaryNames):
                    if "physicalType" in boundary[b]:
                        tab[(nr,i+1)]=boundary[b]["physicalType"]
                nr+=1

            tab[(nr,0)]="Length"
            for i,b in enumerate(boundaryNames):
                tab[(nr,i+1)]=boundary[b]["nFaces"]
            nr+=1
            tab.addLine(head=True)

            for fName in fieldNames:
                tab[(nr,0)]=fName
                for i,b in enumerate(boundaryNames):
                    tab[(nr,i+1)]=types[b][fName]
                nr+=1

            print tab

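        # Table of the physical dimensions of every field, in the order of
        # the header row ([ kg m s K mol A cd ])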
        if self.opts.dimensions:
            print ReST.heading("Dimensions of fields for t =",time)

            tab=ReST.table()
            tab[0]=["Name"]+"[ kg m s K mol A cd ]".split()[1:-1]
            tab.addLine(head=True)
            for i,fName in enumerate(fieldNames):
                f=fields[fName]
                try:
                    dim=str(f["dimensions"]).split()[1:-1]
                except KeyError:
                    dim=["-"]*7
                tab[i+1]=[fName]+dim
            print tab

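        # Show the internalField entry of every field; long (non-uniform)
        # values are truncated after the first line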
        if self.opts.internal:
            print ReST.heading("Internal value of fields for t =",time)

            tab=ReST.table()
            tab[0]=["Name","Value"]
            tab.addLine(head=True)
            for i,fName in enumerate(fieldNames):
                f=fields[fName]

                try:
                    cont=str(f["internalField"])
                    if cont.find("\n")>=0:
                        val=cont[:cont.find("\n")]+"..."
                    else:
                        val=cont
                except KeyError:
                    val="Not a field file"
                tab[i+1]=[fName,val]
            print tab

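        # Count the faces on the procBoundary patches between each pair of
        # processors and print them as a matrix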
        if self.opts.processorMatrix:
            print ReST.heading("Processor matrix")

            if sol.nrProcs()<2:
                print "This case is not decomposed"
            else:
                matrix=[ [0,]*sol.nrProcs() for i in range(sol.nrProcs())]

                for i,p in enumerate(sol.processorDirs()):
                    try:
                        bound=ParsedBoundaryDict(sol.boundaryDict(processor=p,
                                                                  region=theRegion,
                                                                  time=self.opts.time))
                    except IOError:
                        bound=ParsedBoundaryDict(sol.boundaryDict(processor=p,
                                                                  region=theRegion))

                    for j in range(sol.nrProcs()):
                        name="procBoundary%dto%d" %(j,i)
                        name2="procBoundary%dto%d" %(i,j)
                        if name in bound:
                            matrix[i][j]=bound[name]["nFaces"]
                        if name2 in bound:
                            matrix[i][j]=bound[name2]["nFaces"]

                print "Matrix of processor interactions (faces)"
                print

                tab=ReST.table()
                tab[0]=["CPU"]+range(sol.nrProcs())
                tab.addLine(head=True)

                for i,col in enumerate(matrix):
                    tab[i+1]=[i]+matrix[i]

                print tab

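        # Read fvSolution and list solver name and tolerances for every entry
        # of the "solvers" dictionary. Both the dictionary form
        # (tolerance/relTol keywords) and the older list form
        # (solver tolerance relTol) are handled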
        if self.opts.linearSolvers:
            print ReST.heading("Linear Solvers")

            linTable=ReST.table()

            fvSol=ParsedParameterFile(path.join(sol.systemDir(),"fvSolution"))
            allInfo={}
            for sName in fvSol["solvers"]:
                raw=fvSol["solvers"][sName]
                info={}
                info["solver"]=raw[0]

                if type(raw[1]) in [dict,DictProxy]:
                    try:
                        info["tolerance"]=raw[1]["tolerance"]
                    except KeyError:
                        info["tolerance"]=1.
                    try:
                        info["relTol"]=raw[1]["relTol"]
                    except KeyError:
                        info["relTol"]=0.
                else:
                    info["tolerance"]=raw[1]
                    info["relTol"]=raw[2]

                allInfo[sName]=info

            linTable[0]=["Name","Solver","Abs. Tolerance","Relative Tol."]
            linTable.addLine(head=True)

            nr=0
            for n,i in allInfo.iteritems():
                nr+=1
                linTable[nr]=(n,i["solver"],i["tolerance"],i["relTol"])
            print linTable

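        # List the entries of the relaxationFactors dictionary of fvSolution,
        # if present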
        if self.opts.relaxationFactors:
            print ReST.heading("Relaxation")

            fvSol=ParsedParameterFile(path.join(sol.systemDir(),"fvSolution"))
            if "relaxationFactors" in fvSol:
                tab=ReST.table()
                tab[0]=["Name","Factor"]
                tab.addLine(head=True)
                nr=0
                for n,f in fvSol["relaxationFactors"].iteritems():
                    nr+=1
                    tab[nr]=[n,f]
                print tab
            else:
                print "No relaxation factors defined for this case"