
Source Code for Module PyFoam.RunDictionary.SolutionDirectory

#  ICE Revision: $Id$
"""Working with a solution directory"""

from PyFoam.Basics.Utilities import Utilities
from PyFoam.Basics.BasicFile import BasicFile
from PyFoam.Error import warning,error
from PyFoam import configuration as conf

from PyFoam.RunDictionary.TimeDirectory import TimeDirectory
from PyFoam.RunDictionary.ParsedParameterFile import ParsedParameterFile,WriteParameterFile

from PyFoam.Basics.DataStructures import DictProxy

from PyFoam.ThirdParty.six import print_

from os import listdir,path,mkdir,stat,environ
from platform import uname
from time import asctime
from stat import ST_CTIME
import tarfile,fnmatch,glob
import re,os

try:
    from os import getlogin
except ImportError:
    try:
        import PyFoam.ThirdParty.winhacks
    except ImportError:
        print_("Unable to import the getlogin function.")
        import sys
        sys.exit(-1)
class SolutionDirectory(Utilities):
    """Represents a solution directory

    In the solution directory subdirectories whose names are numbers
    are assumed to be solutions for a specific time-step

    A sub-directory (called the Archive) is created to which solution
    data is copied"""

    def __init__(self,
                 name,
                 archive="ArchiveDir",
                 paraviewLink=True,
                 parallel=False,
                 addLocalConfig=False,
                 tolerant=False,
                 region=None):
        """@param name: Name of the solution directory
        @param archive: name of the directory where the lastToArchive-method
        should copy files, if None no archive is created
        @param paraviewLink: Create a symbolic link controlDict.foam for paraview
        @param addLocalConfig: add the local configuration file of the case to the configuration
        @param tolerant: do not fail for minor inconsistencies
        @param parallel: use the first processor-subdirectory for the authoritative information
        @param region: Mesh region for multi-region cases"""

        self.name=path.abspath(name)
        self.archive=None
        if archive!=None:
            self.archive=path.join(name,archive)
            if not path.exists(self.archive):
                mkdir(self.archive)

        self.region=region
        self.backups=[]

        self.parallel=parallel
        self.tolerant=tolerant

        self.lastReread=0
        self.reread()

        self.dirPrefix=''
        if self.processorDirs() and parallel:
            self.dirPrefix = self.processorDirs()[0]

        self.essential=set([self.systemDir(),
                            self.constantDir()])

        # only add the initial directory if no template exists
        if not path.exists(path.join(self.name,"0.org")) and not self.initialDir() is None:
            self.addToClone(self.initialDir())

        # PyFoam-specific
        self.addToClone("PyFoamHistory")
        self.addToClone("customRegexp")
        self.addToClone("LocalConfigPyFoam")

        # this usually comes with the tutorials
        self.addToClone("Allclean")
        self.addToClone("Allrun")

        self.addToClone("*.ipynb")

        emptyFoamFile=path.join(self.name,path.basename(self.name)+".foam")
        if paraviewLink and not path.exists(emptyFoamFile):
            dummy=open(emptyFoamFile,"w")  # equivalent to touch

        if addLocalConfig:
            self.addLocalConfig()

        # These are used by PrepareCase
        self.addToClone("*.org")
        self.addToClone("*.template")
        self.addToClone("*.sh")

        self.__postprocDirs=[]
        self.__postprocInfo={}
        self.addPostprocDir(".")
        self.addPostprocDir("postProcessing",fail=False)

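    # Illustrative usage sketch (the case directory "damBreak" is a
    # hypothetical example, not part of this module):
    #
    #   sol = SolutionDirectory("damBreak", archive=None, paraviewLink=False)
    #   if sol.isValid():
    #       print_(sol.getTimes())      # e.g. ['0', '0.1', '0.2', ...]
    #
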
    def setToParallel(self):
        """Use the parallel times instead of the serial.

        Used to reset the behaviour after it has been set by the constructor"""
        if self.parallel:
            warning(self.name,"is already in parallel mode")
        else:
            self.parallel=True
            if self.processorDirs():
                self.dirPrefix = self.processorDirs()[0]
            self.reread(force=True)

    def addLocalConfig(self):
        """Add the local configuration file of the case to the configuration"""
        fName=path.join(self.name,"LocalConfigPyFoam")
        if path.exists(fName):
            conf().addFile(fName)

    def __len__(self):
        self.reread()
        return len(self.times)

    def __contains__(self,item):
        self.reread()

        if self.timeName(item)!=None:
            return True
        else:
            return False

    def __getitem__(self,key):
        self.reread()

        ind=self.timeName(key)
        if ind==None:
            raise KeyError(key)
        else:
            return TimeDirectory(self.name, self.fullPath(ind), region=self.region)

    def __setitem__(self,key,value):
        self.reread()
        if type(key)!=str:
            raise TypeError(type(key),"of",key,"is not 'str'")

        if type(value)!=TimeDirectory:
            raise TypeError(type(value),"is not TimeDirectory")

        dest=TimeDirectory(self.name, self.fullPath(key), create=True,region=self.region)
        dest.copy(value)

        self.reread(force=True)

    def __delitem__(self,key):
        self.reread()
        nm=self.timeName(key)
        if nm==None:
            raise KeyError(key)

        self.rmtree(path.join(self.name, self.fullPath(nm)),ignore_errors=True)

        self.reread(force=True)

    def __iter__(self):
        self.reread()
        for key in self.times:
            yield TimeDirectory(self.name,
                                self.fullPath(key),
                                region=self.region,
                                tolerant=self.tolerant)

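    # Illustrative sketch of the dictionary-like interface above (the case
    # name is hypothetical):
    #
    #   sol = SolutionDirectory("damBreak")
    #   if 0.5 in sol:                  # is there a time-directory for t=0.5?
    #       last = sol[-1]              # TimeDirectory of the latest time
    #   for t in sol:                   # iterate over all TimeDirectory objects
    #       print_(t.name)
    #
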
    def timeName(self,item,minTime=False):
        """Finds the name of a directory that corresponds with the given parameter
        @param item: the time that should be found
        @param minTime: search for the time with the minimal difference.
        Otherwise an exact match is searched for"""

        if type(item)==int:
            return self.times[item]
        else:
            ind=self.timeIndex(item,minTime)
            if ind==None:
                return None
            else:
                return self.times[ind]

    def timeIndex(self,item,minTime=False):
        """Finds the index of a directory that corresponds with the given parameter
        @param item: the time that should be found
        @param minTime: search for the time with the minimal difference.
        Otherwise an exact match is searched for"""
        self.reread()

        time=float(item)
        result=None

        if minTime:
            result=0
            for i in range(1,len(self.times)):
                if abs(float(self.times[result])-time)>abs(float(self.times[i])-time):
                    result=i
        else:
            for i in range(len(self.times)):
                t=self.times[i]
                if abs(float(t)-time)<1e-6:
                    if result==None:
                        result=i
                    elif abs(float(t)-time)<abs(float(self.times[result])-time):
                        result=i

        return result

    def fullPath(self,time):
        """Return the time name, prefixed with the processor directory
        when operating in parallel mode"""
        if self.dirPrefix:
            return path.join(self.dirPrefix, time)
        return time

    def isValid(self):
        """Checks whether this is a valid case directory by looking for
        the system- and constant-directories and the controlDict-file"""

        return len(self.missingFiles())==0

    def missingFiles(self):
        """Return a list of all the missing files and directories that
        are needed for a valid case"""
        missing=[]
        if not path.exists(self.systemDir()):
            missing.append(self.systemDir())
        elif not path.isdir(self.systemDir()):
            missing.append(self.systemDir())
        if not path.exists(self.constantDir()):
            missing.append(self.constantDir())
        elif not path.isdir(self.constantDir()):
            missing.append(self.constantDir())
        if not path.exists(self.controlDict()):
            missing.append(self.controlDict())

        return missing

    def addToClone(self,name):
        """add directory to the list that is needed to clone this case
        @param name: name of the subdirectory (the case directory is prepended)"""
        if path.exists(path.join(self.name,name)):
            self.essential.add(path.join(self.name,name))
        elif self.parallel:
            if path.exists(path.join(self.name,"processor0",name)):
                self.essential.add(path.join(self.name,name))
        else:
            # check whether this is a file pattern
            for f in glob.glob(path.join(self.name,name)):
                # no check for existence necessary
                self.essential.add(f)

    def cloneCase(self,name,svnRemove=True,followSymlinks=False):
        """create a clone of this case directory. Remove the target directory, if it already exists

        @param name: Name of the new case directory
        @param svnRemove: Look for .svn-directories and remove them
        @param followSymlinks: Follow symbolic links instead of just copying them
        @rtype: L{SolutionDirectory} or correct subclass
        @return: The target directory"""

        additional=eval(conf().get("Cloning","addItem"))
        for a in additional:
            self.addToClone(a)

        if path.exists(name):
            self.rmtree(name)
        mkdir(name)
        if self.parallel:
            for i in range(self.nrProcs()):
                mkdir(path.join(name,"processor%d" % i))

        for d in self.essential:
            if d!=None:
                fs=followSymlinks
                if fs:
                    noForce=eval(conf().get("Cloning","noForceSymlink"))
                    pth,fl=path.split(d)
                    for n in noForce:
                        if fnmatch.fnmatch(fl,n):
                            fs=False
                            break

                if self.parallel:
                    pth,fl=path.split(d)
                    if path.exists(path.join(pth,"processor0",fl)):
                        for i in range(self.nrProcs()):
                            self.copytree(path.join(pth,"processor%d" % i,fl),
                                          path.join(name,"processor%d" % i),
                                          symlinks=not fs)

                if path.exists(d):
                    self.copytree(d,name,symlinks=not fs)

        if svnRemove:
            self.execute("find "+name+" -name .svn -exec rm -rf {} \\; -prune")

        return self.__class__(name,archive=self.archive)

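    # Illustrative sketch: clone a case before modifying it (both directory
    # names are hypothetical):
    #
    #   orig = SolutionDirectory("damBreak")
    #   work = orig.cloneCase("damBreak.run")   # returns a SolutionDirectory
    #   work.clearResults()
    #
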
    def symlinkCase(self,
                    name,
                    followSymlinks=False,
                    maxLevel=1,
                    relPath=False):
        """create a clone of this case directory by creating a
        directory with symbolic links

        @param name: Name of the new case directory
        @param maxLevel: Maximum level down to which directories are created instead of symbolically linked
        @param followSymlinks: Follow symbolic links instead of just copying them
        @param relPath: the created symbolic links are relative (instead of absolute)
        @rtype: L{SolutionDirectory} or correct subclass
        @return: The target directory
        """
        here=path.abspath(self.name)
        polyDirs=[path.relpath(p,here) for p in self.find("polyMesh*",here)]

        additional=eval(conf().get("Cloning","addItem"))
        for a in additional:
            self.addToClone(a)

        if path.exists(name):
            self.rmtree(name)
        mkdir(name)
        toProcess=[]
        for d in self.essential:
            if d!=None:
                if self.parallel:
                    pth,fl=path.split(d)
                    if path.exists(path.join(pth,"processor0",fl)):
                        for i in range(self.nrProcs()):
                            toProcess.append("processor%d" % i)
                if path.exists(d):
                    toProcess.append(path.relpath(d,here))

        maxLevel=max(0,maxLevel)

        self.__symlinkDir(src=here,
                          dest=path.abspath(name),
                          toProcess=toProcess,
                          maxLevel=maxLevel,
                          relPath=relPath,
                          polyDirs=polyDirs,
                          symlinks=not followSymlinks)

        return self.__class__(name,archive=self.archive)

    def __symlinkDir(self,src,dest,toProcess,maxLevel,relPath,polyDirs,symlinks):
        """Recursive helper for symlinkCase: link or copy the entries of one directory level"""
        for f in toProcess:
            there=path.join(src,f)
            here=path.join(dest,f)
            if path.islink(there) and not symlinks:
                there=path.realpath(there)

            doSymlink=False
            done=False

            if not path.isdir(there):
                doSymlink=True
                if path.basename(src)=="polyMesh":
                    if f not in ["blockMeshDict","blockMeshDict.gz"]:
                        doSymlink=False
            else:
                poly=[p for p in polyDirs if p.split(path.sep)[0]==f]
                if maxLevel>0 or len(poly)>0:
                    done=True
                    mkdir(here)
                    self.__symlinkDir(src=there,dest=here,
                                      toProcess=[p for p in os.listdir(there) if p[0]!='.'],
                                      maxLevel=max(0,maxLevel-1),
                                      relPath=relPath,
                                      polyDirs=[path.join(*p.split(path.sep)[1:]) for p in poly if len(p.split(path.sep))>1],
                                      symlinks=symlinks)
                else:
                    doSymlink=True

            if not done:
                if doSymlink:
                    if relPath:
                        linkTo=path.relpath(there,dest)
                    else:
                        linkTo=path.abspath(there)
                    os.symlink(linkTo,here)
                else:
                    self.copytree(there,here,symlinks=symlinks)

    def packCase(self,tarname,last=False,exclude=[],additional=[],base=None):
        """Packs all the important files into a compressed tarfile.
        Uses the essential-list and excludes the .svn-directories.
        Also excludes files ending with ~
        @param tarname: the name of the tar-file
        @param last: add the last directory to the list of directories to be added
        @param exclude: List with additional glob filename-patterns to be excluded
        @param additional: List with additional glob filename-patterns
        that are to be added
        @param base: Different name that is to be used as the baseName for the case inside the tar"""

        ex=["*~",".svn"]+exclude
        members=list(self.essential)
        if last:
            if self.getLast()!=self.first:
                members.append(self.latestDir())
        for p in additional:
            for f in listdir(self.name):
                if (f not in members) and fnmatch.fnmatch(f,p):
                    members.append(path.join(self.name,f))

        tar=tarfile.open(tarname,"w:gz")

        for m in members:
            self.addToTar(tar,m,exclude=ex,base=base)

        additional=eval(conf().get("Cloning","addItem"))
        for a in additional:
            self.addToTar(tar,
                          path.join(self.name,a),
                          exclude=ex,
                          base=base)

        tar.close()

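    # Illustrative sketch: pack a case into a compressed tar-file (the case
    # and file names are hypothetical):
    #
    #   sol = SolutionDirectory("damBreak")
    #   sol.packCase("damBreak.tgz", last=True, additional=["*.py"])
    #
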
    def addToTar(self,tar,pattern,exclude=[],base=None):
        """The workhorse for the packCase-method"""

        if base==None:
            base=path.basename(self.name)

        for name in glob.glob(pattern):
            excluded=False
            for e in exclude:
                if fnmatch.fnmatch(path.basename(name),e):
                    excluded=True
            if excluded:
                continue

            if path.isdir(name):
                for m in listdir(name):
                    self.addToTar(tar,path.join(name,m),exclude=exclude,base=base)
            else:
                arcname=path.join(base,name[len(self.name)+1:])
                if path.islink(name):
                    # if the symbolic link points to a file in the case keep it
                    # otherwise replace with the real file
                    lPath=path.os.readlink(name)
                    if not path.isabs(lPath):
                        rPath=path.realpath(name)
                        common=path.commonprefix([path.abspath(rPath),
                                                  path.abspath(base)])
                        # if the path is shorter than the base it must be outside the case
                        if len(common)<len(path.abspath(base)):
                            name=path.abspath(rPath)
                    else:
                        # use the absolute path
                        name=lPath
                try:
                    tar.getmember(arcname)
                    # don't add ... the file is already there
                except KeyError:
                    # file not in tar
                    tar.add(name,arcname=arcname)

    def getParallelTimes(self):
        """Get a list of the times in the processor0-directory"""
        result=[]

        proc0=path.join(self.name,"processor0")
        if path.exists(proc0):
            for f in listdir(proc0):
                try:
                    val=float(f)
                    result.append(f)
                except ValueError:
                    pass
        result.sort(key=float)
        return result

    def reread(self,force=False):
        """Rescan the directory for the time directories"""

        if not force and stat(self.name)[ST_CTIME]<=self.lastReread:
            return

        self.times=[]
        self.first=None
        self.last=None
        procDirs = self.processorDirs()
        self.procNr=len(procDirs)

        if procDirs and self.parallel:
            timesDir = path.join(self.name, procDirs[0])
        else:
            timesDir = self.name

        for f in listdir(timesDir):
            try:
                val=float(f)
                self.times.append(f)
            except ValueError:
                pass

        self.lastReread=stat(self.name)[ST_CTIME]

        self.times.sort(key=float)
        if self.times:
            self.first = self.times[0]
            self.last = self.times[-1]

    def processorDirs(self):
        """List with the processor directories"""
        try:
            return self.procDirs
        except AttributeError:
            pass
        self.procDirs=[]
        for f in listdir(self.name):
            if re.compile("processor[0-9]+").match(f):
                self.procDirs.append(f)

        return self.procDirs

    def nrProcs(self):
        """The number of directories with processor-data"""
        self.reread()
        return self.procNr

    def getTimes(self):
        """@return: List of all the available times"""
        self.reread()
        return self.times

    def addBackup(self,pth):
        """add file to list of files that are to be copied to the
        archive"""
        self.backups.append(path.join(self.name,pth))

    def getFirst(self):
        """@return: the first time for which a solution exists
        @rtype: str"""
        self.reread()
        return self.first

    def getLast(self):
        """@return: the last time for which a solution exists
        @rtype: str"""
        self.reread()
        return self.last

    def lastToArchive(self,name):
        """copy the last solution (plus the backup-files) to the
        archive

        @param name: name of the sub-directory in the archive"""
        if self.archive==None:
            print_("Warning: no archive directory")
            return

        self.reread()
        fname=path.join(self.archive,name)
        if path.exists(fname):
            self.rmtree(fname)
        mkdir(fname)
        self.copytree(path.join(self.name,self.last),fname)
        for f in self.backups:
            self.copytree(f,fname)

    def clearResults(self,
                     after=None,
                     removeProcs=False,
                     keepLast=False,
                     vtk=True,
                     keepRegular=False,
                     keepParallel=False,
                     keepInterval=None,
                     functionObjectData=False,
                     additional=[]):
        """remove all time-directories after a certain time. If no time is
        set the initial time is used
        @param after: time after which directories are to be removed
        @param removeProcs: if True the processorX-directories are removed.
        Otherwise the timesteps after last are removed from the
        processor-directories
        @param keepLast: Keep the data from the last timestep
        @param keepInterval: if set: keep timesteps that are this far apart
        @param vtk: Remove the VTK-directory if it exists
        @param keepRegular: keep all the times (only remove processor and other stuff)
        @param functionObjectData: tries to determine which data was written by function objects and removes it
        @param additional: List with glob-patterns that are removed too"""

        self.reread()

        last=self.getLast()

        if after==None:
            try:
                time=float(self.first)
            except TypeError:
                warning("The first timestep in",self.name," is ",self.first,"not a number. Doing nothing")
                return
        else:
            time=float(after)

        lastKeptIndex=int(-1e5)

        if keepInterval!=None:
            if keepInterval<=0:
                error("The keeping interval",keepInterval,"is smaller than 0")

        if not keepRegular:
            for f in self.times:
                keep=False
                if keepInterval!=None:
                    thisIndex=int((float(f)+1e-10)/keepInterval)
                    if thisIndex!=lastKeptIndex:
                        keep=True
                if float(f)>time and not (keepLast and f==last) and not keep:
                    # print "Removing",path.join(self.name,f)
                    self.rmtree(path.join(self.name,f))
                elif keepInterval!=None:
                    lastKeptIndex=int((float(f)+1e-10)/keepInterval)

        if path.exists(path.join(self.name,"VTK")) and vtk:
            self.rmtree(path.join(self.name,"VTK"))

        if self.nrProcs() and not keepParallel:
            lastKeptIndex=int(-1e5)
            for f in listdir(self.name):
                if re.compile("processor[0-9]+").match(f):
                    if removeProcs:
                        self.rmtree(path.join(self.name,f))
                    else:
                        pDir=path.join(self.name,f)
                        for t in listdir(pDir):
                            try:
                                keep=False
                                val=float(t)
                                if keepInterval!=None:
                                    thisIndex=int((float(t)+1e-10)/keepInterval)
                                    if thisIndex!=lastKeptIndex:
                                        keep=True
                                if val>time and not (keepLast and t==last) and not keep:
                                    self.rmtree(path.join(pDir,t))
                                elif keepInterval!=None:
                                    lastKeptIndex=int((float(t)+1e-10)/keepInterval)
                            except ValueError:
                                pass

        if functionObjectData:
            cd=ParsedParameterFile(self.controlDict(),doMacroExpansion=True)
            if "functions" in cd:
                if type(cd["functions"]) in [DictProxy,dict]:
                    for f in cd["functions"]:
                        pth=path.join(self.name,f)
                        if path.exists(pth):
                            self.rmtree(pth)
                else:
                    for f in cd["functions"][0::2]:
                        pth=path.join(self.name,f)
                        if path.exists(pth):
                            self.rmtree(pth)

        additional+=eval(conf().get("Clearing","additionalpatterns"))
        for a in additional:
            self.clearPattern(a)

    def clearPattern(self,globPat):
        """Clear all files that fit a certain shell (glob) pattern
        @param globPat: the pattern which the files are going to fit"""

        for f in glob.glob(path.join(self.name,globPat)):
            if path.isdir(f):
                self.rmtree(f,ignore_errors=False)
            else:
                os.unlink(f)

    def clearOther(self,
                   pyfoam=True,
                   removeAnalyzed=False,
                   clearHistory=False,
                   clearParameters=False):
        """Remove additional directories
        @param pyfoam: remove all directories typically created by PyFoam"""

        if pyfoam:
            self.clearPattern("PyFoam.?*")
        if removeAnalyzed:
            self.clearPattern("*?.analyzed")
        if clearParameters:
            self.clearPattern("PyFoamPrepareCaseParameters")
        if clearHistory:
            self.clearPattern("PyFoamHistory")

    def clear(self,
              after=None,
              processor=True,
              pyfoam=True,
              keepLast=False,
              vtk=True,
              keepRegular=False,
              keepParallel=False,
              keepInterval=None,
              removeAnalyzed=False,
              clearHistory=False,
              clearParameters=False,
              functionObjectData=False,
              additional=[]):
        """One-stop-shop to remove data
        @param after: time after which directories are to be removed
        @param processor: remove the processorXX directories
        @param pyfoam: remove all directories typically created by PyFoam
        @param keepLast: Keep the last time-step
        @param additional: list with additional patterns to clear"""
        self.clearResults(after=after,
                          removeProcs=processor,
                          keepLast=keepLast,
                          keepInterval=keepInterval,
                          vtk=vtk,
                          keepRegular=keepRegular,
                          keepParallel=keepParallel,
                          functionObjectData=functionObjectData,
                          additional=additional)
        self.clearOther(pyfoam=pyfoam,
                        removeAnalyzed=removeAnalyzed,
                        clearParameters=clearParameters,
                        clearHistory=clearHistory)

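    # Illustrative sketch: reset a case but keep the last written time-step
    # (the case name and extra pattern are hypothetical):
    #
    #   sol = SolutionDirectory("damBreak")
    #   sol.clear(processor=True, keepLast=True, additional=["postProcessing"])
    #
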
    def initialDir(self):
        """@return: the name of the first time-directory (==initial
        conditions)
        @rtype: str"""
        self.reread()

        if self.first:
            return path.join(self.name,self.first)
        else:
            if path.exists(path.join(self.name,"0.org")):
                return path.join(self.name,"0.org")
            else:
                return None

    def latestDir(self):
        """@return: the name of the last time-directory (==simulation
        results)
        @rtype: str"""
        self.reread()

        last=self.getLast()
        if last:
            return path.join(self.name,last)
        else:
            return None

    def constantDir(self,region=None,processor=None):
        """@param region: Specify the region for cases with more than 1 mesh
        @param processor: name of the processor directory
        @return: the name of the C{constant}-directory
        @rtype: str"""
        pre=self.name
        if processor!=None:
            if type(processor)==int:
                processor="processor%d" % processor
            pre=path.join(pre,processor)

        if region==None and self.region!=None:
            region=self.region
        if region:
            return path.join(pre,"constant",region)
        else:
            return path.join(pre,"constant")

    def systemDir(self,region=None):
        """@param region: Specify the region for cases with more than 1 mesh
        @return: the name of the C{system}-directory
        @rtype: str"""
        if region==None and self.region!=None:
            region=self.region
        if region:
            return path.join(self.name,"system",region)
        else:
            return path.join(self.name,"system")

    def controlDict(self):
        """@return: the name of the C{controlDict}
        @rtype: str"""
        return path.join(self.systemDir(),"controlDict")

    def polyMeshDir(self,region=None,time=None,processor=None):
        """@param region: Specify the region for cases with more than 1 mesh
        @param time: Time for which the mesh should be looked at
        @param processor: Name of the processor directory for decomposed cases
        @return: the name of the C{polyMesh}
        @rtype: str"""
        if region==None and self.region!=None:
            region=self.region
        if time==None:
            return path.join(
                self.constantDir(
                    region=region,
                    processor=processor),
                "polyMesh")
        else:
            return path.join(
                TimeDirectory(self.name,
                              time,
                              region=region,
                              processor=processor).name,
                "polyMesh")

    def boundaryDict(self,region=None,time=None,processor=None):
        """@param region: Specify the region for cases with more than 1 mesh
        @return: name of the C{boundary}-file
        @rtype: str"""
        if region==None and self.region!=None:
            region=self.region
        return path.join(self.polyMeshDir(region=region,time=time,processor=processor),"boundary")

    def blockMesh(self,region=None):
        """@param region: Specify the region for cases with more than 1 mesh
        @return: the name of the C{blockMeshDict} if it exists. Returns
        an empty string if it doesn't
        @rtype: str"""
        if region==None and self.region!=None:
            region=self.region
        p=path.join(self.polyMeshDir(region=region),"blockMeshDict")
        if path.exists(p):
            return p
        else:
            return ""

    def makeFile(self,name):
        """create a file in the solution directory and return a
        corresponding BasicFile-object

        @param name: Name of the file
        @rtype: L{BasicFile}"""
        return BasicFile(path.join(self.name,name))

    def getRegions(self,defaultRegion=False):
        """Gets a list of all the available mesh regions by checking all
        directories in constant and using all those that have a polyMesh-subdirectory
        @param defaultRegion: should the default region also be added (as None)"""
        lst=[]
        for d in self.listDirectory(self.constantDir()):
            if path.isdir(path.join(self.constantDir(),d)):
                if path.exists(self.polyMeshDir(region=d)):
                    lst.append(d)

        if defaultRegion:
            if path.exists(self.polyMeshDir()):
                lst.append(None)

        lst.sort()
        return lst

    def addToHistory(self,*text):
        """Adds a line with date and username to a file 'PyFoamHistory'
        that resides in the local directory"""
        hist=open(path.join(self.name,"PyFoamHistory"),"a")

        try:
            # this seems to fail when no stdin is available
            username=getlogin()
        except OSError:
            username=environ["USER"]

        hist.write("%s by %s in %s :" % (asctime(),username,uname()[1]))

        for t in text:
            hist.write(str(t)+" ")

        hist.write("\n")
        hist.close()

    def listFiles(self,directory=None):
        """List all the plain files (not directories) in a subdirectory
        of the case
        @param directory: the subdirectory. If unspecified the
        case-directory itself is used
        @return: List with the plain filenames"""

        result=[]
        theDir=self.name
        if directory:
            theDir=path.join(theDir,directory)

        for f in listdir(theDir):
            if f[0]!='.' and f[-1]!='~':
                if path.isfile(path.join(theDir,f)):
                    result.append(f)

        return result

    def getDictionaryText(self,directory,name):
        """@param directory: Sub-directory of the case
        @param name: name of the dictionary file
        @return: the contents of the file as a big string"""

        result=None
        theDir=self.name
        if directory:
            theDir=path.join(theDir,directory)

        if path.exists(path.join(theDir,name)):
            result=open(path.join(theDir,name)).read()
        else:
            warning("File",name,"does not exist in directory",directory,"of case",self.name)

        return result

    def writeDictionaryContents(self,directory,name,contents):
        """Writes the contents of a dictionary
        @param directory: Sub-directory of the case
        @param name: name of the dictionary file
        @param contents: Python-dictionary with the dictionary contents"""

        theDir=self.name
        if directory:
            theDir=path.join(theDir,directory)

        result=WriteParameterFile(path.join(theDir,name))
        result.content=contents
        result.writeFile()

    def writeDictionaryText(self,directory,name,text):
        """Writes the contents of a dictionary
        @param directory: Sub-directory of the case
        @param name: name of the dictionary file
        @param text: String with the dictionary contents"""

        theDir=self.name
        if directory:
            theDir=path.join(theDir,directory)

        result=open(path.join(theDir,name),"w").write(text)

    def getDictionaryContents(self,directory,name):
        """@param directory: Sub-directory of the case
        @param name: name of the dictionary file
        @return: the contents of the file as a python data-structure"""

        result={}
        theDir=self.name
        if directory:
            theDir=path.join(theDir,directory)

        if path.exists(path.join(theDir,name)):
            result=ParsedParameterFile(path.join(theDir,name)).content
        else:
            warning("File",name,"does not exist in directory",directory,"of case",self.name)

        return result

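    # Illustrative sketch: read and modify a case dictionary via the helper
    # methods above (case name and the changed value are hypothetical):
    #
    #   sol = SolutionDirectory("damBreak")
    #   cd = sol.getDictionaryContents("system","controlDict")
    #   cd["endTime"] = 2
    #   sol.writeDictionaryContents("system","controlDict",cd)
    #
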
    def determineVCS(self):
        """Find out whether this directory is controlled by a VCS and
        return the abbreviation of that VCS"""

        if path.isdir(path.join(self.name,".hg")):
            return "hg"
        elif path.isdir(path.join(self.name,".git")):
            return "git"
        elif path.isdir(path.join(self.name,".svn")):
            return "svn"
        else:
            return None

    def addPostprocDir(self,dirName,fail=True):
        """Register a sub-directory that is scanned for post-processing data
        @param dirName: name of the sub-directory
        @param fail: raise an error if the directory does not exist"""
        if dirName in self.__postprocDirs:
            return
        full=path.join(self.name,dirName)
        if not path.isdir(full):
            if fail:
                error(full,"does not exist or is not a directory")
            else:
                return

        self.__postprocDirs.append(dirName)
        self.__postprocInfo={}

    def __classifyDirectory(self,dPath):
        """Guess what kind of post-processing data is stored in a directory
        by looking at the files in its first (smallest) time-directory"""
        cnt=0
        minimum="1e40"
        for d in listdir(dPath):
            full=path.join(dPath,d)
            if not path.isdir(full):
                continue
            try:
                if float(d)<float(minimum):
                    minimum=d
                cnt+=1
            except ValueError:
                continue
        if cnt<=0:
            return None
        first=path.join(dPath,minimum)
        hypothesis=None
        for f in listdir(first):
            ff=path.join(first,f)
            if not path.isfile(ff):
                continue
            try:
                float(f)
                continue
            except ValueError:
                pass
            b,e=path.splitext(f)
            if e==".xy":
                newHypothesis="sample"
            elif e==".vtk":
                newHypothesis="surface"
            elif e=="":
                if b.find("istribution")>0:
                    newHypothesis="distribution"
                else:
                    newHypothesis="timeline"
            else:
                newHypothesis=None

            if hypothesis==None:
                hypothesis=newHypothesis
            elif hypothesis!=newHypothesis and newHypothesis:
                error("Can not decide between",hypothesis,
                      "and",newHypothesis,"for",full)
        return hypothesis

    def __scanForPostproc(self,dirName):
        """Scan one registered directory and classify its sub-directories"""
        for d in listdir(path.join(self.name,dirName)):
            full=path.join(self.name,dirName,d)
            if not path.isdir(full):
                continue
            try:
                # we don't want time directories
                float(d)
                continue
            except ValueError:
                pass
            c=self.__classifyDirectory(full)
            use=path.join(dirName,d)
            if c=="timeline":
                self.__postprocInfo["timelines"].append(use)
            elif c=="sample":
                self.__postprocInfo["samples"].append(use)
            elif c=="surface":
                self.__postprocInfo["surfaces"].append(use)
            elif c=="distribution":
                self.__postprocInfo["distributions"].append(use)
            elif c==None:
                pass
            else:
                error("Unknown classification",c,"for",full)

    def __scanPostproc(self):
        """Rebuild the cached classification of all post-processing directories"""
        self.__postprocInfo={"timelines":[],
                             "samples":[],
                             "distributions":[],
                             "surfaces":[]}
        for d in self.__postprocDirs:
            self.__scanForPostproc(d)

    @property
    def pickledData(self):
        """Get the pickled data files. Newest first"""
        dirAndTime=[]
        for f in ["pickledData","pickledUnfinishedData","pickledStartData"]:
            for g in glob.glob(path.join(self.name,"*.analyzed")):
                pName=path.join(g,f)
                if path.exists(pName):
                    dirAndTime.append((path.getmtime(pName),pName))
        dirAndTime.sort(key=lambda x:x[0],reverse=True)
        return [s[len(self.name)+1:] for t,s in dirAndTime]

    @property
    def pickledPlots(self):
        """Get the pickled plot files. Newest first"""
        dirAndTime=[]
        for g in glob.glob(path.join(self.name,"*.analyzed")):
            pName=path.join(g,"pickledPlots")
            if path.exists(pName):
                dirAndTime.append((path.getmtime(pName),pName))
        dirAndTime.sort(key=lambda x:x[0],reverse=True)
        return [s[len(self.name)+1:] for t,s in dirAndTime]

    @property
    def timelines(self):
        """Return sub-directories that contain timeline-data"""
        if "timelines" not in self.__postprocInfo:
            self.__scanPostproc()
        return self.__postprocInfo["timelines"]

    @property
    def distributions(self):
        """Return sub-directories that contain distribution-data"""
        if "distributions" not in self.__postprocInfo:
            self.__scanPostproc()
        return self.__postprocInfo["distributions"]

    @property
    def samples(self):
        """Return sub-directories that contain sample-data"""
        if "samples" not in self.__postprocInfo:
            self.__scanPostproc()
        return self.__postprocInfo["samples"]

    @property
    def surfaces(self):
        """Return sub-directories that contain surface-data"""
        if "surfaces" not in self.__postprocInfo:
            self.__scanPostproc()
        return self.__postprocInfo["surfaces"]

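    # Illustrative sketch: list the post-processing data a solver run has
    # produced (case name and the example output are hypothetical):
    #
    #   sol = SolutionDirectory("damBreak")
    #   print_(sol.timelines)      # e.g. ['postProcessing/probes']
    #   print_(sol.samples)        # e.g. ['postProcessing/sets']
    #
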
    def getParametersFromFile(self):
        """Get Parameters from the file created by PrepareCase"""
        fName=path.join(self.name,"PyFoamPrepareCaseParameters")
        if path.exists(fName):
            return ParsedParameterFile(fName,noHeader=True).content
        else:
            return {}

class ChemkinSolutionDirectory(SolutionDirectory):
    """Solution directory with a directory for the Chemkin-files"""

    chemkinName = "chemkin"

    def __init__(self,name,archive="ArchiveDir"):
        SolutionDirectory.__init__(self,name,archive=archive)

        self.addToClone(self.chemkinName)

    def chemkinDir(self):
        """@rtype: str
        @return: The directory with the Chemkin-Files"""

        return path.join(self.name,self.chemkinName)

class NoTouchSolutionDirectory(SolutionDirectory):
    """Convenience class that makes sure that nothing new is created"""

    def __init__(self,
                 name,
                 region=None):
        SolutionDirectory.__init__(self,
                                   name,
                                   archive=None,
                                   paraviewLink=False,
                                   region=region)

# Should work with Python3 and Python2