"""
Application class that implements pyFoamCaseReport.py
"""

import sys,string
from optparse import OptionGroup

from fnmatch import fnmatch

from PyFoamApplication import PyFoamApplication
from PyFoam.RunDictionary.SolutionDirectory import SolutionDirectory
from PyFoam.RunDictionary.BoundaryDict import BoundaryDict
from PyFoam.RunDictionary.MeshInformation import MeshInformation
from PyFoam.RunDictionary.ParsedParameterFile import PyFoamParserError,ParsedBoundaryDict,ParsedParameterFile
from PyFoam.Basics.RestructuredTextHelper import RestructuredTextHelper
from PyFoam.Basics.DataStructures import DictProxy

from PyFoam.Error import error,warning

from math import log10,ceil
from os import path

class CaseReport(PyFoamApplication):
    def __init__(self,args=None):
        description="""\
Produces human-readable reports about a case. Attention: the amount of
information in the reports is limited. The truth is always in the
dictionary files.

The format of the output is restructured text, so it can be run through
a postprocessor like rst2latex or rst2html to produce PDF or HTML
respectively.
"""

        PyFoamApplication.__init__(self,
                                   args=args,
                                   description=description,
                                   usage="%prog [options] <casedir>",
                                   nr=1,
                                   changeVersion=False,
                                   interspersed=True)

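    # addOptions() is called by the PyFoamApplication framework before the
    # command line is parsed; the options are collected in optparse
    # OptionGroups so that --help lists them by topic.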
    def addOptions(self):
        report=OptionGroup(self.parser,
                           "Reports",
                           "What kind of reports should be produced")
        self.parser.add_option_group(report)
        select=OptionGroup(self.parser,
                           "Selection",
                           "Which data should be used for the reports")
        self.parser.add_option_group(select)
        internal=OptionGroup(self.parser,
                             "Internal",
                             "Details of the parser")
        self.parser.add_option_group(internal)

        format=OptionGroup(self.parser,
                           "Formatting",
                           "Restructured Text formatting")
        self.parser.add_option_group(format)

        format.add_option("--heading-level",
                          action="store",
                          type="int",
                          default=2,
                          dest="headingLevel",
                          help="Default level of the headings. Valid values from 0 to 5. Default: %default")

        output=OptionGroup(self.parser,
                           "Output",
                           "How the output should be generated")
        self.parser.add_option_group(output)

        output.add_option("--file",
                          action="store",
                          default=None,
                          dest="file",
                          help="Write the output to a file instead of the console")

        report.add_option("--full-report",
                          action="store_true",
                          default=False,
                          dest="all",
                          help="Print all available reports at once")

        report.add_option("--short-bc-report",
                          action="store_true",
                          default=False,
                          dest="shortBCreport",
                          help="Gives a short overview of the boundary conditions in the case")

        report.add_option("--long-bc-report",
                          action="store_true",
                          default=False,
                          dest="longBCreport",
                          help="Gives a full overview of the boundary conditions in the case")

        report.add_option("--dimensions",
                          action="store_true",
                          default=False,
                          dest="dimensions",
                          help="Show the dimensions of the fields")

        report.add_option("--internal-field",
                          action="store_true",
                          default=False,
                          dest="internal",
                          help="Show the internal value of the fields (the initial conditions)")

        report.add_option("--linear-solvers",
                          action="store_true",
                          default=False,
                          dest="linearSolvers",
                          help="Print the linear solvers and their tolerances")

        report.add_option("--relaxation-factors",
                          action="store_true",
                          default=False,
                          dest="relaxationFactors",
                          help="Print the relaxation factors (if there are any)")

        select.add_option("--time",
                          action="store",
                          type="float",
                          default=None,
                          dest="time",
                          help="Time to use as the basis for the reports")

        select.add_option("--region",
                          dest="region",
                          default=None,
                          help="Do the report for a specific region of a multi-region case")

        select.add_option("--all-regions",
                          dest="allRegions",
                          action="store_true",
                          default=False,
                          help="Do the report for all regions of a multi-region case")

        select.add_option("--parallel",
                          action="store_true",
                          default=False,
                          dest="parallel",
                          help="Get the times from the processor directories")

        internal.add_option("--long-field-threshold",
                            action="store",
                            type="int",
                            default=100,
                            dest="longlist",
                            help="Fields that are longer than this won't be parsed, but read into memory (and compared as strings). Default: %default")

        internal.add_option("--no-do-macro-expansion",
                            action="store_false",
                            default=True,
                            dest="doMacros",
                            help="Don't expand macros with $ and # in the field-files")

        select.add_option("--patches",
                          action="append",
                          default=None,
                          dest="patches",
                          help="Patches which should be processed (pattern, can be used more than once)")

        select.add_option("--exclude-patches",
                          action="append",
                          default=None,
                          dest="expatches",
                          help="Patches which should not be processed (pattern, can be used more than once)")

        report.add_option("--processor-matrix",
                          action="store_true",
                          default=False,
                          dest="processorMatrix",
                          help="Print a matrix of how many faces each processor shares with the other processors")

        report.add_option("--case-size",
                          action="store_true",
                          default=False,
                          dest="caseSize",
                          help="Report the number of cells, points and faces in the case")

        report.add_option("--decomposition",
                          action="store_true",
                          default=False,
                          dest="decomposition",
                          help="Report the size of the parallel decomposition")

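    # run() is the entry point invoked by the PyFoamApplication framework
    # after the options have been parsed: it optionally redirects stdout to
    # a file and then reports either one region or every region of the case.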
    def run(self):
        if self.opts.file:
            sys.stdout=open(self.opts.file,"w")

        if self.opts.allRegions:
            sol=SolutionDirectory(self.parser.getArgs()[0],
                                  archive=None,
                                  parallel=self.opts.parallel,
                                  paraviewLink=False)
            for r in sol.getRegions():
                self.doRegion(r)
        else:
            self.doRegion(self.opts.region)

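    # doRegion() produces the actual report for one mesh region (theRegion
    # is None for single-region cases); every report selected on the command
    # line is printed for the data of this region only.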
    def doRegion(self,theRegion):
        ReST=RestructuredTextHelper(defaultHeading=self.opts.headingLevel)

        if self.opts.allRegions:
            print ReST.buildHeading("Region: ",theRegion,level=self.opts.headingLevel-1)

        sol=SolutionDirectory(self.parser.getArgs()[0],
                              archive=None,
                              parallel=self.opts.parallel,
                              paraviewLink=False,
                              region=theRegion)

        if self.opts.all:
            self.opts.caseSize=True
            self.opts.shortBCreport=True
            self.opts.longBCreport=True
            self.opts.dimensions=True
            self.opts.internal=True
            self.opts.linearSolvers=True
            self.opts.relaxationFactors=True
            self.opts.processorMatrix=True
            self.opts.decomposition=True

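        # Map the requested --time onto the name of a time directory that
        # actually exists in the case; abort if no such time can be found.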
        if self.opts.time:
            try:
                self.opts.time=sol.timeName(sol.timeIndex(self.opts.time,minTime=True))
            except IndexError:
                error("The specified time",self.opts.time,"doesn't exist in the case")
            print "Using time t="+self.opts.time+"\n"

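        # Decide which data has to be read for the selected reports: the
        # polyMesh boundary file and/or the field files of the report time.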
        needsPolyBoundaries=False
        needsInitialTime=False

        if self.opts.longBCreport:
            needsPolyBoundaries=True
            needsInitialTime=True
        if self.opts.shortBCreport:
            needsPolyBoundaries=True
            needsInitialTime=True
        if self.opts.dimensions:
            needsInitialTime=True
        if self.opts.internal:
            needsInitialTime=True
        if self.opts.decomposition:
            needsPolyBoundaries=True

        defaultProc=None
        if self.opts.parallel:
            defaultProc=0

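        # Read the boundary description of the mesh and build the sorted list
        # of patch names, skipping processor patches and honouring the
        # --patches/--exclude-patches patterns.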
        if needsPolyBoundaries:
            proc=None
            boundary=BoundaryDict(sol.name,
                                  region=theRegion,
                                  time=self.opts.time,
                                  processor=defaultProc)

            boundMaxLen=0
            boundaryNames=[]
            for b in boundary:
                if b.find("procBoundary")!=0:
                    boundaryNames.append(b)
            if self.opts.patches!=None:
                tmp=boundaryNames
                boundaryNames=[]
                for b in tmp:
                    for p in self.opts.patches:
                        if fnmatch(b,p):
                            boundaryNames.append(b)
                            break

            if self.opts.expatches!=None:
                tmp=boundaryNames
                boundaryNames=[]
                for b in tmp:
                    keep=True
                    for p in self.opts.expatches:
                        if fnmatch(b,p):
                            keep=False
                            break
                    if keep:
                        boundaryNames.append(b)

            for b in boundaryNames:
                boundMaxLen=max(boundMaxLen,len(b))
            boundaryNames.sort()

        if self.opts.time==None:
            procTime="constant"
        else:
            procTime=self.opts.time

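        # Parse every field file of the report time; lists longer than
        # --long-field-threshold are kept as unparsed strings and files that
        # fail to parse are skipped with a warning.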
        if needsInitialTime:
            fields={}

            if self.opts.time==None:
                try:
                    time=sol.timeName(0)
                except IndexError:
                    error("There is no timestep in the case")
            else:
                time=self.opts.time

            tDir=sol[time]

            nameMaxLen=0

            for f in tDir:
                try:
                    fields[f.baseName()]=f.getContent(listLengthUnparsed=self.opts.longlist,
                                                      doMacroExpansion=self.opts.doMacros)
                    nameMaxLen=max(nameMaxLen,len(f.baseName()))
                except PyFoamParserError,e:
                    warning("Couldn't parse",f.name,"because of an error:",e," -> skipping")

            fieldNames=fields.keys()
            fieldNames.sort()

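        # --case-size: number of faces, points and cells, summed over all
        # processor directories when --parallel is given.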
        if self.opts.caseSize:
            print ReST.heading("Size of the case")

            nFaces=0
            nPoints=0
            nCells=0
            if self.opts.parallel:
                procs=range(sol.nrProcs())
                print "Accumulated from",sol.nrProcs(),"processors"
            else:
                procs=[None]

            for p in procs:
                info=MeshInformation(sol.name,
                                     processor=p,
                                     region=theRegion,
                                     time=self.opts.time)
                nFaces+=info.nrOfFaces()
                nPoints+=info.nrOfPoints()
                try:
                    nCells+=info.nrOfCells()
                except:
                    nCells="Not available"

            tab=ReST.table()
            tab[0]=("Faces",nFaces)
            tab[1]=("Points",nPoints)
            tab[2]=("Cells",nCells)
            print tab

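        # --decomposition: per-processor mesh sizes plus the number of faces
        # every patch has on each processor.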
        if self.opts.decomposition:
            print ReST.heading("Decomposition")

            if sol.nrProcs()<2:
                print "This case is not decomposed"
            else:
                print "Case is decomposed for",sol.nrProcs(),"processors"
                print

                nCells=[]
                nFaces=[]
                nPoints=[]
                for p in sol.processorDirs():
                    info=MeshInformation(sol.name,
                                         processor=p,
                                         region=theRegion,
                                         time=self.opts.time)
                    nPoints.append(info.nrOfPoints())
                    nFaces.append(info.nrOfFaces())
                    nCells.append(info.nrOfCells())

                digits=int(ceil(log10(max(sol.nrProcs(),
                                          max(nCells),
                                          max(nFaces),
                                          max(nPoints)))))+2
                nameLen=max(len("Points"),boundMaxLen)

                tab=ReST.table()
                tab[0]=["CPU"]+range(sol.nrProcs())

                tab.addLine()

                tab[1]=["Points"]+nPoints
                tab[2]=["Faces"]+nFaces
                tab[3]=["Cells"]+nCells
                tab.addLine(head=True)

                nr=3
                for b in boundaryNames:
                    nr+=1
                    tab[(nr,0)]=b
                    for i,p in enumerate(sol.processorDirs()):
                        try:
                            nFaces=ParsedBoundaryDict(sol.boundaryDict(processor=p,
                                                                       region=theRegion,
                                                                       time=self.opts.time))[b]["nFaces"]
                        except IOError:
                            nFaces=ParsedBoundaryDict(sol.boundaryDict(processor=p,
                                                                       region=theRegion))[b]["nFaces"]
                        except KeyError:
                            nFaces=0

                        tab[(nr,i+1)]=nFaces

                print tab

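        # --long-bc-report: one section per patch with a table that lists,
        # for every field, the boundary-condition type and its parameters
        # (long entries are truncated to their first line).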
        if self.opts.longBCreport:
            print ReST.heading("The boundary conditions for t =",time)

            for b in boundaryNames:
                print ReST.buildHeading("Boundary: ",b,level=self.opts.headingLevel+1)
                bound=boundary[b]
                print ":Type:\t",bound["type"]
                if "physicalType" in bound:
                    print ":Physical:\t",bound["physicalType"]
                print ":Faces:\t",bound["nFaces"]
                print
                heads=["Field","type"]
                tab=ReST.table()
                tab[0]=heads
                tab.addLine(head=True)
                for row,fName in enumerate(fieldNames):
                    tab[(row+1,0)]=fName
                    f=fields[fName]
                    if "boundaryField" not in f:
                        tab[(row+1,1)]="Not a field file"
                    elif b not in f["boundaryField"]:
                        tab[(row+1,1)]="MISSING !!!"
                    else:
                        bf=f["boundaryField"][b]

                        for k in bf:
                            try:
                                col=heads.index(k)
                            except ValueError:
                                col=len(heads)
                                tab[(0,col)]=k
                                heads.append(k)
                            cont=str(bf[k])
                            if cont.find("\n")>=0:
                                tab[(row+1,col)]=cont[:cont.find("\n")]+"..."
                            else:
                                tab[(row+1,col)]=cont
                print tab

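        # --short-bc-report: one table with the patches as columns and the
        # boundary-condition type of every field as rows.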
        if self.opts.shortBCreport:
            print ReST.heading("Table of boundary conditions for t =",time)

            types={}
            hasPhysical=False
            for b in boundary:
                if "physicalType" in boundary[b]:
                    hasPhysical=True

                types[b]={}

                for fName in fields:
                    f=fields[fName]
                    try:
                        if b not in f["boundaryField"]:
                            types[b][fName]="MISSING"
                        else:
                            types[b][fName]=f["boundaryField"][b]["type"]
                    except KeyError:
                        types[b][fName]="Not a field"

            tab=ReST.table()
            tab[0]=[""]+boundaryNames
            tab.addLine()
            tab[(1,0)]="Patch Type"
            for i,b in enumerate(boundaryNames):
                tab[(1,i+1)]=boundary[b]["type"]

            nr=2
            if hasPhysical:
                tab[(nr,0)]="Physical Type"
                for i,b in enumerate(boundaryNames):
                    if "physicalType" in boundary[b]:
                        tab[(nr,i+1)]=boundary[b]["physicalType"]
                nr+=1

            tab[(nr,0)]="Length"
            for i,b in enumerate(boundaryNames):
                tab[(nr,i+1)]=boundary[b]["nFaces"]
            nr+=1
            tab.addLine(head=True)

            for fName in fieldNames:
                tab[(nr,0)]=fName
                for i,b in enumerate(boundaryNames):
                    tab[(nr,i+1)]=types[b][fName]
                nr+=1

            print tab

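        # --dimensions: the physical dimensions (SI base-unit exponents) of
        # every field.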
        if self.opts.dimensions:
            print ReST.heading("Dimensions of fields for t =",time)

            tab=ReST.table()
            tab[0]=["Name"]+"[ kg m s K mol A cd ]".split()[1:-1]
            tab.addLine(head=True)
            for i,fName in enumerate(fieldNames):
                f=fields[fName]
                try:
                    dim=str(f["dimensions"]).split()[1:-1]
                except KeyError:
                    dim=["-"]*7
                tab[i+1]=[fName]+dim
            print tab

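        # --internal-field: the internalField entry (initial condition) of
        # every field, truncated to its first line for long lists.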
        if self.opts.internal:
            print ReST.heading("Internal value of fields for t =",time)

            tab=ReST.table()
            tab[0]=["Name","Value"]
            tab.addLine(head=True)
            for i,fName in enumerate(fieldNames):
                f=fields[fName]

                try:
                    cont=str(f["internalField"])
                    if cont.find("\n")>=0:
                        val=cont[:cont.find("\n")]+"..."
                    else:
                        val=cont
                except KeyError:
                    val="Not a field file"
                tab[i+1]=[fName,val]
            print tab

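        # --processor-matrix: number of faces on the processor patches
        # between every pair of processors of a decomposed case.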
        if self.opts.processorMatrix:
            print ReST.heading("Processor matrix")

            if sol.nrProcs()<2:
                print "This case is not decomposed"
            else:
                matrix=[ [0,]*sol.nrProcs() for i in range(sol.nrProcs())]

                for i,p in enumerate(sol.processorDirs()):
                    try:
                        bound=ParsedBoundaryDict(sol.boundaryDict(processor=p,
                                                                  region=theRegion,
                                                                  time=self.opts.time))
                    except IOError:
                        bound=ParsedBoundaryDict(sol.boundaryDict(processor=p,
                                                                  region=theRegion))

                    for j in range(sol.nrProcs()):
                        name="procBoundary%dto%d" % (j,i)
                        name2="procBoundary%dto%d" % (i,j)
                        if name in bound:
                            matrix[i][j]=bound[name]["nFaces"]
                        if name2 in bound:
                            matrix[i][j]=bound[name2]["nFaces"]

                print "Matrix of processor interactions (faces)"
                print

                tab=ReST.table()
                tab[0]=["CPU"]+range(sol.nrProcs())
                tab.addLine(head=True)

                for i,col in enumerate(matrix):
                    tab[i+1]=[i]+matrix[i]

                print tab

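        # --linear-solvers: solver name and tolerances from fvSolution,
        # accepting both the dictionary form and the list form of the
        # entries in the solvers dictionary.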
        if self.opts.linearSolvers:
            print ReST.heading("Linear Solvers")

            linTable=ReST.table()

            fvSol=ParsedParameterFile(path.join(sol.systemDir(),"fvSolution"))
            allInfo={}
            for sName in fvSol["solvers"]:
                raw=fvSol["solvers"][sName]
                info={}
                if type(raw) in [dict,DictProxy]:
                    info["solver"]=raw["solver"]
                    solverData=raw
                else:
                    info["solver"]=raw[0]
                    solverData=raw[1]

                if type(solverData) in [dict,DictProxy]:
                    try:
                        info["tolerance"]=solverData["tolerance"]
                    except KeyError:
                        info["tolerance"]=1.
                    try:
                        info["relTol"]=solverData["relTol"]
                    except KeyError:
                        info["relTol"]=0.
                else:
                    info["tolerance"]=solverData
                    info["relTol"]=raw[2]

                allInfo[sName]=info

            linTable[0]=["Name","Solver","Abs. Tolerance","Relative Tol."]
            linTable.addLine(head=True)

            nr=0
            for n,i in allInfo.iteritems():
                nr+=1
                linTable[nr]=(n,i["solver"],i["tolerance"],i["relTol"])
            print linTable

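        # --relaxation-factors: contents of the relaxationFactors dictionary
        # in fvSolution, if it is present.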
        if self.opts.relaxationFactors:
            print ReST.heading("Relaxation")

            fvSol=ParsedParameterFile(path.join(sol.systemDir(),"fvSolution"))
            if "relaxationFactors" in fvSol:
                tab=ReST.table()
                tab[0]=["Name","Factor"]
                tab.addLine(head=True)
                nr=0
                for n,f in fvSol["relaxationFactors"].iteritems():
                    nr+=1
                    tab[nr]=[n,f]
                print tab
            else:
                print "No relaxation factors defined for this case"