
Source Code for Module PyFoam.Applications.CaseReport

#  ICE Revision: $Id$
"""
Application class that implements pyFoamCaseReport.py
  4  """ 
  5   
  6  import sys 
  7  from optparse import OptionGroup 
  8   
  9  from fnmatch import fnmatch 
 10   
 11  from .PyFoamApplication import PyFoamApplication 
 12  from PyFoam.RunDictionary.SolutionDirectory import SolutionDirectory 
 13  from PyFoam.RunDictionary.BoundaryDict import BoundaryDict 
 14  from PyFoam.RunDictionary.MeshInformation import MeshInformation 
 15  from PyFoam.RunDictionary.ParsedParameterFile import PyFoamParserError,ParsedBoundaryDict,ParsedParameterFile 
 16  from PyFoam.Basics.RestructuredTextHelper import RestructuredTextHelper 
 17  from PyFoam.Basics.DataStructures import DictProxy,Field 
 18   
 19  from PyFoam.Error import error,warning 
 20   
 21  from PyFoam.ThirdParty.six import print_,iteritems,string_types 
 22   
 23  from math import log10,ceil 
 24  from os import path 
 25   
class CaseReport(PyFoamApplication):
    def __init__(self,
                 args=None,
                 **kwargs):
        description="""\
Produces human-readable reports about a case. Attention: the amount of
information in the reports is limited. The truth is always in the
dictionary-files.

The format of the output is restructured-text so it can be run through
a postprocessor like rst2tex or rst2html to produce PDF or HTML
respectively.
"""

        PyFoamApplication.__init__(self,
                                   args=args,
                                   description=description,
                                   usage="%prog [options] <casedir>",
                                   nr=1,
                                   changeVersion=False,
                                   interspersed=True,
                                   **kwargs)

    def addOptions(self):
        report=OptionGroup(self.parser,
                           "Reports",
                           "What kind of reports should be produced")
        self.parser.add_option_group(report)
        select=OptionGroup(self.parser,
                           "Selection",
                           "Which data should be used for the reports")
        self.parser.add_option_group(select)
        internal=OptionGroup(self.parser,
                             "Internal",
                             "Details of the parser")
        self.parser.add_option_group(internal)

        format=OptionGroup(self.parser,
                           "Formatting",
                           "Restructured Text formatting")
        self.parser.add_option_group(format)

        format.add_option("--heading-level",
                          action="store",
                          type="int",
                          default=2,
                          dest="headingLevel",
                          help="Default level of the headings. Valid values from 0 to 5. Default: %default")

        output=OptionGroup(self.parser,
                           "Output",
                           "How output should be generated")
        self.parser.add_option_group(output)

        output.add_option("--file",
                          action="store",
                          default=None,
                          dest="file",
                          help="Write the output to a file instead of the console")

        report.add_option("--full-report",
                          action="store_true",
                          default=False,
                          dest="all",
                          help="Print all available reports at once")

        report.add_option("--short-bc-report",
                          action="store_true",
                          default=False,
                          dest="shortBCreport",
                          help="Gives a short overview of the boundary-conditions in the case")

        report.add_option("--long-bc-report",
                          action="store_true",
                          default=False,
                          dest="longBCreport",
                          help="Gives a full overview of the boundary-conditions in the case")

        report.add_option("--dimensions",
                          action="store_true",
                          default=False,
                          dest="dimensions",
                          help="Show the dimensions of the fields")

        report.add_option("--internal-field",
                          action="store_true",
                          default=False,
                          dest="internal",
                          help="Show the internal value of the fields (the initial conditions)")

        report.add_option("--linear-solvers",
                          action="store_true",
                          default=False,
                          dest="linearSolvers",
                          help="Print the linear solvers and their tolerance")

        report.add_option("--relaxation-factors",
                          action="store_true",
                          default=False,
                          dest="relaxationFactors",
                          help="Print the relaxation factors (if there are any)")

        select.add_option("--time",
                          action="store",
                          type="float",
                          default=None,
                          dest="time",
                          help="Time to use as the basis for the reports")

        select.add_option("--region",
                          dest="region",
                          default=None,
                          help="Do the report for a specific region of a multi-region case")

        select.add_option("--all-regions",
                          dest="allRegions",
                          action="store_true",
                          default=False,
                          help="Do the report for all regions of a multi-region case")

        select.add_option("--parallel",
                          action="store_true",
                          default=False,
                          dest="parallel",
                          help="Get times from the processor-directories")

        internal.add_option("--long-field-threshold",
                            action="store",
                            type="int",
                            default=100,
                            dest="longlist",
                            help="Fields that are longer than this won't be parsed, but read into memory (and compared as strings). Default: %default")

        internal.add_option("--no-do-macro-expansion",
                            action="store_false",
                            default=True,
                            dest="doMacros",
                            help="Don't expand macros with $ and # in the field-files")

        internal.add_option("--treat-binary-as-ascii",
                            action="store_true",
                            default=False,
                            dest="treatBinaryAsASCII",
                            help="Try to treat binary dictionaries as ASCII anyway")

        internal.add_option("--no-treat-boundary-binary-as-ascii",
                            action="store_false",
                            default=True,
                            dest="boundaryTreatBinaryAsASCII",
                            help="If 'boundary'-files are written as binary read them as such (default assumes that these files are ASCII whatever the header says)")

        select.add_option("--patches",
                          action="append",
                          default=None,
                          dest="patches",
                          help="Patches which should be processed (pattern, can be used more than once)")

        select.add_option("--exclude-patches",
                          action="append",
                          default=None,
                          dest="expatches",
                          help="Patches which should not be processed (pattern, can be used more than once)")

        report.add_option("--processor-matrix",
                          action="store_true",
                          default=False,
                          dest="processorMatrix",
                          help="Prints the matrix of how many faces from one processor interact with another")

        report.add_option("--case-size",
                          action="store_true",
                          default=False,
                          dest="caseSize",
                          help="Report the number of cells, points and faces in the case")

        report.add_option("--decomposition",
                          action="store_true",
                          default=False,
                          dest="decomposition",
                          help="Reports the size of the parallel decomposition")

    def run(self):
        oldStdout=None

        try:
            if self.opts.file:
                oldStdout=sys.stdout
                if isinstance(self.opts.file,string_types):
                    sys.stdout=open(self.opts.file,"w")
                else:
                    sys.stdout=self.opts.file

            if self.opts.allRegions:
                sol=SolutionDirectory(self.parser.getArgs()[0],
                                      archive=None,
                                      parallel=self.opts.parallel,
                                      paraviewLink=False)
                for r in sol.getRegions():
                    self.doRegion(r)
            else:
                self.doRegion(self.opts.region)
        finally:
            if oldStdout:
                sys.stdout=oldStdout

    def doRegion(self,theRegion):
        ReST=RestructuredTextHelper(defaultHeading=self.opts.headingLevel)

        if self.opts.allRegions:
            print_(ReST.buildHeading("Region: ",theRegion,level=self.opts.headingLevel-1))

        sol=SolutionDirectory(self.parser.getArgs()[0],
                              archive=None,
                              parallel=self.opts.parallel,
                              paraviewLink=False,
                              region=theRegion)

        # --full-report switches on all of the individual reports
        if self.opts.all:
            self.opts.caseSize=True
            self.opts.shortBCreport=True
            self.opts.longBCreport=True
            self.opts.dimensions=True
            self.opts.internal=True
            self.opts.linearSolvers=True
            self.opts.relaxationFactors=True
            self.opts.processorMatrix=True
            self.opts.decomposition=True

        if self.opts.time:
            try:
                self.opts.time=sol.timeName(sol.timeIndex(self.opts.time,minTime=True))
            except IndexError:
                error("The specified time",self.opts.time,"doesn't exist in the case")
            print_("Using time t="+self.opts.time+"\n")

        needsPolyBoundaries=False
        needsInitialTime=False

        if self.opts.longBCreport:
            needsPolyBoundaries=True
            needsInitialTime=True
        if self.opts.shortBCreport:
            needsPolyBoundaries=True
            needsInitialTime=True
        if self.opts.dimensions:
            needsInitialTime=True
        if self.opts.internal:
            needsInitialTime=True
        if self.opts.decomposition:
            needsPolyBoundaries=True

        defaultProc=None
        if self.opts.parallel:
            defaultProc=0

        # read the boundary description and collect the patch names
        # (processor patches are skipped, the --patches/--exclude-patches filters are applied)
        if needsPolyBoundaries:
            proc=None
            boundary=BoundaryDict(sol.name,
                                  region=theRegion,
                                  time=self.opts.time,
                                  treatBinaryAsASCII=self.opts.boundaryTreatBinaryAsASCII,
                                  processor=defaultProc)

            boundMaxLen=0
            boundaryNames=[]
            for b in boundary:
                if b.find("procBoundary")!=0:
                    boundaryNames.append(b)
            if self.opts.patches!=None:
                tmp=boundaryNames
                boundaryNames=[]
                for b in tmp:
                    for p in self.opts.patches:
                        if fnmatch(b,p):
                            boundaryNames.append(b)
                            break

            if self.opts.expatches!=None:
                tmp=boundaryNames
                boundaryNames=[]
                for b in tmp:
                    keep=True
                    for p in self.opts.expatches:
                        if fnmatch(b,p):
                            keep=False
                            break
                    if keep:
                        boundaryNames.append(b)

            for b in boundaryNames:
                boundMaxLen=max(boundMaxLen,len(b))
            boundaryNames.sort()

        if self.opts.time==None:
            procTime="constant"
        else:
            procTime=self.opts.time

        # parse the fields of the selected (or initial) time for the field-based reports
        if needsInitialTime:
            fields={}

            if self.opts.time==None:
                try:
                    time=sol.timeName(0)
                except IndexError:
                    error("There is no timestep in the case")
            else:
                time=self.opts.time

            tDir=sol[time]

            nameMaxLen=0

            for f in tDir:
                try:
                    fields[f.baseName()]=f.getContent(listLengthUnparsed=self.opts.longlist,
                                                      treatBinaryAsASCII=self.opts.treatBinaryAsASCII,
                                                      doMacroExpansion=self.opts.doMacros)
                    nameMaxLen=max(nameMaxLen,len(f.baseName()))
                except PyFoamParserError:
                    e = sys.exc_info()[1]  # Needed because python 2.5 does not support 'as e'
                    warning("Couldn't parse",f.name,"because of an error:",e," -> skipping")

            fieldNames=list(fields.keys())
            fieldNames.sort()

        if self.opts.caseSize:
            print_(ReST.heading("Size of the case"))

            nFaces=0
            nPoints=0
            nCells=0
            if self.opts.parallel:
                procs=list(range(sol.nrProcs()))
                print_("Accumulated from",sol.nrProcs(),"processors")
            else:
                procs=[None]

            for p in procs:
                info=MeshInformation(sol.name,
                                     processor=p,
                                     region=theRegion,
                                     time=self.opts.time)
                nFaces+=info.nrOfFaces()
                nPoints+=info.nrOfPoints()
                try:
                    nCells+=info.nrOfCells()
                except:
                    nCells="Not available"

            tab=ReST.table()
            tab[0]=("Faces",nFaces)
            tab[1]=("Points",nPoints)
            tab[2]=("Cells",nCells)
            print_(tab)

        if self.opts.decomposition:
            print_(ReST.heading("Decomposition"))

            if sol.nrProcs()<2:
                print_("This case is not decomposed")
            else:
                print_("Case is decomposed for",sol.nrProcs(),"processors")
                print_()

                nCells=[]
                nFaces=[]
                nPoints=[]
                for p in sol.processorDirs():
                    info=MeshInformation(sol.name,
                                         processor=p,
                                         region=theRegion,
                                         time=self.opts.time)
                    nPoints.append(info.nrOfPoints())
                    nFaces.append(info.nrOfFaces())
                    nCells.append(info.nrOfCells())

                digits=int(ceil(log10(max(sol.nrProcs(),
                                          max(nCells),
                                          max(nFaces),
                                          max(nPoints)
                                          ))))+2
                nameLen=max(len("Points"),boundMaxLen)

                tab=ReST.table()
                tab[0]=["CPU"]+list(range(sol.nrProcs()))

                tab.addLine()

                tab[1]=["Points"]+nPoints
                tab[2]=["Faces"]+nFaces
                tab[3]=["Cells"]+nCells
                tab.addLine(head=True)

                nr=3
                for b in boundaryNames:
                    nr+=1
                    tab[(nr,0)]=b
                    for i,p in enumerate(sol.processorDirs()):
                        try:
                            nFaces= ParsedBoundaryDict(sol.boundaryDict(processor=p,
                                                                        region=theRegion,
                                                                        time=self.opts.time),
                                                       treatBinaryAsASCII=self.opts.boundaryTreatBinaryAsASCII
                                                       )[b]["nFaces"]
                        except IOError:
                            nFaces= ParsedBoundaryDict(sol.boundaryDict(processor=p,
                                                                        region=theRegion),
                                                       treatBinaryAsASCII=self.opts.boundaryTreatBinaryAsASCII
                                                       )[b]["nFaces"]
                        except KeyError:
                            nFaces=0

                        tab[(nr,i+1)]=nFaces

                print_(tab)

        if self.opts.longBCreport:
            print_(ReST.heading("The boundary conditions for t =",time))

            for b in boundaryNames:
                print_(ReST.buildHeading("Boundary: ",b,level=self.opts.headingLevel+1))
                bound=boundary[b]
                print_(":Type:\t",bound["type"])
                if "physicalType" in bound:
                    print_(":Physical:\t",bound["physicalType"])
                print_(":Faces:\t",bound["nFaces"])
                print_()
                heads=["Field","type"]
                tab=ReST.table()
                tab[0]=heads
                tab.addLine(head=True)
                for row,fName in enumerate(fieldNames):
                    tab[(row+1,0)]=fName
                    f=fields[fName]
                    if "boundaryField" not in f:
                        tab[(row+1,1)]="Not a field file"
                    elif b not in f["boundaryField"]:
                        tab[(row+1,1)]="MISSING !!!"
                    else:
                        bf=f["boundaryField"][b]

                        for k in bf:
                            try:
                                col=heads.index(k)
                            except ValueError:
                                col=len(heads)
                                tab[(0,col)]=k
                                heads.append(k)
                            cont=str(bf[k])
                            if type(bf[k])==Field:
                                if bf[k].isBinary():
                                    cont= bf[k].binaryString()

                            if cont.find("\n")>=0:
                                tab[(row+1,col)]=cont[:cont.find("\n")]+"..."
                            else:
                                tab[(row+1,col)]=cont
                print_(tab)

        if self.opts.shortBCreport:
            print_(ReST.heading("Table of boundary conditions for t =",time))

            types={}
            hasPhysical=False
            for b in boundary:
                if "physicalType" in boundary[b]:
                    hasPhysical=True

                types[b]={}

                for fName in fields:
                    f=fields[fName]
                    try:
                        if b not in f["boundaryField"]:
                            types[b][fName]="MISSING"
                        else:
                            types[b][fName]=f["boundaryField"][b]["type"]
                    except KeyError:
                        types[b][fName]="Not a field"

            tab=ReST.table()
            tab[0]=[""]+boundaryNames
            tab.addLine()
            tab[(1,0)]="Patch Type"
            for i,b in enumerate(boundaryNames):
                tab[(1,i+1)]=boundary[b]["type"]

            nr=2
            if hasPhysical:
                tab[(nr,0)]="Physical Type"
                for i,b in enumerate(boundaryNames):
                    if "physicalType" in boundary[b]:
                        tab[(nr,i+1)]=boundary[b]["physicalType"]
                nr+=1

            tab[(nr,0)]="Length"
            for i,b in enumerate(boundaryNames):
                tab[(nr,i+1)]=boundary[b]["nFaces"]
            nr+=1
            tab.addLine(head=True)

            for fName in fieldNames:
                tab[(nr,0)]=fName
                for i,b in enumerate(boundaryNames):
                    tab[(nr,i+1)]=types[b][fName]
                nr+=1

            print_(tab)

        if self.opts.dimensions:
            print_(ReST.heading("Dimensions of fields for t =",time))

            tab=ReST.table()
            tab[0]=["Name"]+"[ kg m s K mol A cd ]".split()[1:-1]
            tab.addLine(head=True)
            for i,fName in enumerate(fieldNames):
                f=fields[fName]
                try:
                    dim=str(f["dimensions"]).split()[1:-1]
                except KeyError:
                    dim=["-"]*7
                tab[i+1]=[fName]+dim
            print_(tab)

        if self.opts.internal:
            print_(ReST.heading("Internal value of fields for t =",time))

            tab=ReST.table()
            tab[0]=["Name","Value"]
            tab.addLine(head=True)
            for i,fName in enumerate(fieldNames):
                f=fields[fName]

                try:
                    if f["internalField"].isBinary():
                        val=f["internalField"].binaryString()
                    else:
                        cont=str(f["internalField"])
                        if cont.find("\n")>=0:
                            val=cont[:cont.find("\n")]+"..."
                        else:
                            val=cont
                except KeyError:
                    val="Not a field file"
                tab[i+1]=[fName,val]
            print_(tab)

        if self.opts.processorMatrix:
            print_(ReST.heading("Processor matrix"))

            if sol.nrProcs()<2:
                print_("This case is not decomposed")
            else:
                matrix=[ [0,]*sol.nrProcs() for i in range(sol.nrProcs())]

                for i,p in enumerate(sol.processorDirs()):
                    try:
                        bound=ParsedBoundaryDict(sol.boundaryDict(processor=p,
                                                                  region=theRegion,
                                                                  time=self.opts.time)
                                                 ,treatBinaryAsASCII=self.opts.boundaryTreatBinaryAsASCII)
                    except IOError:
                        bound=ParsedBoundaryDict(sol.boundaryDict(processor=p,
                                                                  treatBinaryAsASCII=self.opts.treatBinaryAsASCII,
                                                                  region=theRegion)
                                                 ,treatBinaryAsASCII=self.opts.boundaryTreatBinaryAsASCII)

                    for j in range(sol.nrProcs()):
                        name="procBoundary%dto%d" %(j,i)
                        name2="procBoundary%dto%d" %(i,j)
                        if name in bound:
                            matrix[i][j]=bound[name]["nFaces"]
                        if name2 in bound:
                            matrix[i][j]=bound[name2]["nFaces"]

                print_("Matrix of processor interactions (faces)")
                print_()

                tab=ReST.table()
                tab[0]=["CPU"]+list(range(sol.nrProcs()))
                tab.addLine(head=True)

                for i,col in enumerate(matrix):
                    tab[i+1]=[i]+matrix[i]

                print_(tab)

        if self.opts.linearSolvers:
            print_(ReST.heading("Linear Solvers"))

            linTable=ReST.table()

            fvSol=ParsedParameterFile(path.join(sol.systemDir(),"fvSolution"),
                                      treatBinaryAsASCII=self.opts.treatBinaryAsASCII)
            allInfo={}
            for sName in fvSol["solvers"]:
                raw=fvSol["solvers"][sName]
                info={}
                if type(raw) in [dict,DictProxy]:
                    # fvSolution format in 1.7
                    try:
                        info["solver"]=raw["solver"]
                    except KeyError:
                        info["solver"]="<none>"
                    solverData=raw
                else:
                    info["solver"]=raw[0]
                    solverData=raw[1]

                if type(solverData) in [dict,DictProxy]:
                    try:
                        info["tolerance"]=solverData["tolerance"]
                    except KeyError:
                        info["tolerance"]=1.
                    try:
                        info["relTol"]=solverData["relTol"]
                    except KeyError:
                        info["relTol"]=0.
                else:
                    # the old (pre-1.5) fvSolution-format
                    info["tolerance"]=solverData
                    info["relTol"]=raw[2]

                allInfo[sName]=info

            linTable[0]=["Name","Solver","Abs. Tolerance","Relative Tol."]
            linTable.addLine(head=True)

            nr=0
            for n,i in iteritems(allInfo):
                nr+=1
                linTable[nr]=(n,i["solver"],i["tolerance"],i["relTol"])
            print_(linTable)

        if self.opts.relaxationFactors:
            print_(ReST.heading("Relaxation"))

            fvSol=ParsedParameterFile(path.join(sol.systemDir(),"fvSolution"),
                                      treatBinaryAsASCII=self.opts.treatBinaryAsASCII)
            if "relaxationFactors" in fvSol:
                relax=fvSol["relaxationFactors"]
                tab=ReST.table()
                tab[0]=["Name","Factor"]
                tab.addLine(head=True)
                nr=0
                if "fields" in relax or "equations" in relax:
                    # New syntax
                    for k in ["fields","equations"]:
                        if k in relax:
                            for n,f in iteritems(relax[k]):
                                nr+=1
                                tab[nr]=[k+": "+n,f]
                else:
                    for n,f in iteritems(relax):
                        nr+=1
                        tab[nr]=[n,f]
                print_(tab)
            else:
                print_("No relaxation factors defined for this case")

# Should work with Python3 and Python2
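
Besides the pyFoamCaseReport.py command line, the class can usually be driven directly from Python, since the PyFoam application classes normally parse and execute the supplied argument list when they are constructed. The snippet below is a minimal usage sketch under that assumption; the case directory "damBreak", the output file "caseReport.rst" and the chosen options are only placeholders.

from PyFoam.Applications.CaseReport import CaseReport

# Report the mesh size and the short boundary-condition table of the
# (hypothetical) case "damBreak" and write the restructured-text output
# to "caseReport.rst" instead of the console.
CaseReport(args=["--case-size",
                 "--short-bc-report",
                 "--file=caseReport.rst",
                 "damBreak"])

A file written this way is plain restructured text and can afterwards be postprocessed with a tool such as rst2html to obtain an HTML report.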