
Source Code for Module PyFoam.RunDictionary.ParsedParameterFile

#  ICE Revision: $Id: ParsedParameterFile.py 12791 2013-02-05 13:40:06Z bgschaid $
"""Parameter file is read into memory and modified there"""

from PyFoam.RunDictionary.FileBasis import FileBasisBackup
from PyFoam.Basics.PlyParser import PlyParser
from PyFoam.Basics.FoamFileGenerator import FoamFileGenerator

from PyFoam.Basics.DataStructures import Vector,Field,Dimension,DictProxy,TupleProxy,Tensor,SymmTensor,Unparsed,UnparsedList,Codestream,DictRedirection,BinaryBlob,BinaryList

from PyFoam.Error import error,warning,FatalErrorPyFoamException

from os import path
from copy import deepcopy
import sys

from PyFoam.ThirdParty.six import print_,integer_types,iteritems

class ParsedParameterFile(FileBasisBackup):
    """Parameter file whose complete representation is read into
    memory, can be manipulated and afterwards written to disk"""

    def __init__(self,
                 name,
                 backup=False,
                 debug=False,
                 boundaryDict=False,
                 listDict=False,
                 listDictWithHeader=False,
                 listLengthUnparsed=None,
                 preserveComments=True,
                 noHeader=False,
                 binaryMode=False,
                 treatBinaryAsASCII=False,
                 noBody=False,
                 doMacroExpansion=False,
                 dontRead=False,
                 noVectorOrTensor=False,
                 dictStack=None,
                 createZipped=True,
                 longListOutputThreshold=20):
        """@param name: The name of the parameter file
        @param backup: create a backup-copy of the file
        @param boundaryDict: the file to parse is a boundary file
        @param listDict: the file only contains a list
        @param listDictWithHeader: the file only contains a list and a header
        @param listLengthUnparsed: Lists longer than that length are not parsed
        @param binaryMode: Parse long lists in binary mode (to be overridden by
        the settings in the header)
        @param treatBinaryAsASCII: even if the header says that this is a
        binary file treat it like an ASCII-file
        @param noHeader: don't expect a header
        @param noBody: don't read the body of the file (only the header)
        @param doMacroExpansion: expand #include and $var
        @param noVectorOrTensor: short lists of length 3, 6 and 9 are NOT
        interpreted as vectors or tensors
        @param dontRead: Do not read the file during construction
        @param longListOutputThreshold: Lists that are longer than this are
        prefixed with a length
        @param dictStack: dictionary stack for lookup (only used for include)
        """

        self.noHeader=noHeader
        self.noBody=noBody
        FileBasisBackup.__init__(self,
                                 name,
                                 backup=backup,
                                 createZipped=createZipped)
        self.debug=debug
        self.boundaryDict=boundaryDict
        self.listDict=listDict
        self.listDictWithHeader=listDictWithHeader
        self.listLengthUnparsed=listLengthUnparsed
        self.doMacros=doMacroExpansion
        self.preserveComments=preserveComments
        self.noVectorOrTensor=noVectorOrTensor
        self.header=None
        self.content=None
        self.longListOutputThreshold=longListOutputThreshold
        self.binaryMode=binaryMode
        self.treatBinaryAsASCII=treatBinaryAsASCII
        self.lastDecoration=""
        self.dictStack=dictStack

        if not dontRead:
            self.readFile()

    def parse(self,content):
        """Constructs a representation of the file"""
        try:
            parser=FoamFileParser(content,
                                  debug=self.debug,
                                  fName=self.name,
                                  boundaryDict=self.boundaryDict,
                                  listDict=self.listDict,
                                  listDictWithHeader=self.listDictWithHeader,
                                  listLengthUnparsed=self.listLengthUnparsed,
                                  noHeader=self.noHeader,
                                  noBody=self.noBody,
                                  preserveComments=self.preserveComments,
                                  binaryMode=self.binaryMode,
                                  treatBinaryAsASCII=self.treatBinaryAsASCII,
                                  noVectorOrTensor=self.noVectorOrTensor,
                                  dictStack=self.dictStack,
                                  doMacroExpansion=self.doMacros)
        except BinaryParserError:
            e = sys.exc_info()[1]  # Needed because Python 2.5 does not support 'as e'
            if not self.treatBinaryAsASCII:
                # Retrying in ASCII-mode although the file thinks it is binary
                parser=FoamFileParser(content,
                                      debug=self.debug,
                                      fName=self.name,
                                      boundaryDict=self.boundaryDict,
                                      listDict=self.listDict,
                                      listDictWithHeader=self.listDictWithHeader,
                                      listLengthUnparsed=self.listLengthUnparsed,
                                      noHeader=self.noHeader,
                                      noBody=self.noBody,
                                      preserveComments=self.preserveComments,
                                      binaryMode=self.binaryMode,
                                      treatBinaryAsASCII=True,
                                      noVectorOrTensor=self.noVectorOrTensor,
                                      dictStack=self.dictStack,
                                      doMacroExpansion=self.doMacros)
            else:
                raise e

        self.content=parser.getData()
        self.header=parser.getHeader()
        self.lastDecoration=parser._decorationBuffer

        return self.content

    def __contains__(self,key):
        return key in self.content

    def __getitem__(self,key):
        return self.content[key]

    def __setitem__(self,key,value):
        self.content[key]=value

    def __delitem__(self,key):
        del self.content[key]

    def __len__(self):
        return len(self.content)

    def __iter__(self):
        for key in self.content:
            yield key

    def __str__(self):
        """Generates a string from the contents in memory.
        Used to be called makeString"""

        string="// -*- C++ -*-\n// File generated by PyFoam - sorry for the ugliness\n\n"

        generator=FoamFileGenerator(self.content,
                                    header=self.header,
                                    longListThreshold=self.longListOutputThreshold)
        string+=generator.makeString(firstLevel=True)

        if len(self.lastDecoration)>0:
            string+="\n\n"+self.lastDecoration

        return string

    def getValueDict(self):
        """Get a dictionary with the values with the decorators removed"""
        result={}
        if self.content:
            for k in self.content:
                if type(k) not in integer_types:
                    result[k]=self.content[k]
        return result
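
# Usage sketch (illustrative, not part of the original source): read an
# existing dictionary, change entries and write it back.  The case path and
# keywords are placeholders; writeFile() is assumed to be inherited from the
# FileBasis base class.
#
#     from PyFoam.RunDictionary.ParsedParameterFile import ParsedParameterFile
#
#     ctrl = ParsedParameterFile("system/controlDict", backup=True)
#     ctrl["endTime"] = 1000            # entries behave like dictionary items
#     ctrl["writeInterval"] = 50
#     ctrl.writeFile()                  # serialize self.content back to disk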

class WriteParameterFile(ParsedParameterFile):
    """A specialization that is used to only write to the file"""

    def __init__(self,
                 name,
                 backup=False,
                 className="dictionary",
                 objectName=None,
                 createZipped=False):
        ParsedParameterFile.__init__(self,
                                     name,
                                     backup=backup,
                                     dontRead=True,
                                     createZipped=createZipped)

        if objectName==None:
            objectName=path.basename(name)

        self.content=DictProxy()
        self.header={"version":"2.0",
                     "format":"ascii",
                     "class":className,
                     "object":objectName}
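
# Usage sketch (illustrative, not part of the original source): create a new
# dictionary file from scratch; the path and keys are placeholders and
# writeFile() is assumed to come from FileBasis.
#
#     from PyFoam.RunDictionary.ParsedParameterFile import WriteParameterFile
#
#     out = WriteParameterFile("constant/transportDict", className="dictionary")
#     out["model"] = "Newtonian"
#     out["settings"] = {"tolerance": 1e-6, "maxIter": 100}
#     out.writeFile()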

class Enumerate(object):
    def __init__(self, names):
        for number, name in enumerate(names):
            setattr(self, name, number)

inputModes=Enumerate(["merge","error","warn","protect","overwrite","default"])
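
# Illustration (not part of the original source): Enumerate simply turns a
# list of names into numbered attributes, so the #inputMode directives seen
# by the parser can be stored as small integers.
#
#     modes = Enumerate(["merge", "error"])
#     modes.merge      # -> 0
#     modes.error      # -> 1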

class FoamFileParser(PlyParser):
    """Class that parses a string that contains the contents of an
    OpenFOAM-file and builds a nested structure of dictionaries and
    lists from it"""

    def __init__(self,
                 content,
                 fName=None,
                 debug=False,
                 noHeader=False,
                 noBody=False,
                 doMacroExpansion=False,
                 boundaryDict=False,
                 preserveComments=True,
                 preserveNewlines=True,
                 listDict=False,
                 listDictWithHeader=False,
                 listLengthUnparsed=None,
                 binaryMode=False,
                 treatBinaryAsASCII=False,
                 duplicateCheck=False,
                 noVectorOrTensor=False,
                 dictStack=None,
                 duplicateFail=True):
        """@param content: the string to be parsed
        @param fName: Name of the actual file (if any)
        @param debug: output debug information during parsing
        @param noHeader: switch that turns off the parsing of the header
        @param duplicateCheck: Check for duplicates in dictionaries
        @param duplicateFail: Fail if a duplicate is discovered"""

        self.binaryMode=binaryMode
        self.treatBinaryAsASCII=treatBinaryAsASCII
        self.fName=fName
        self.data=None
        self.header=None
        self.debug=debug
        self.listLengthUnparsed=listLengthUnparsed
        self.doMacros=doMacroExpansion
        self.preserveComments=preserveComments
        self.preserveNewLines=preserveNewlines
        self.duplicateCheck=duplicateCheck
        self.duplicateFail=duplicateFail
        self.noVectorOrTensor=noVectorOrTensor
        self.inHeader=True
        self.inBinary=False

        # Make sure that the first comment is discarded
        self.collectDecorations=False
        self.inputMode=inputModes.merge

        self._decorationBuffer=""

        startCnt=0

        self.dictStack=dictStack
        if self.dictStack==None:
            self.dictStack=[DictProxy()]

        if noBody:
            self.start='noBody'
            startCnt+=1

        if noHeader:
            self.inHeader=False
            self.start='noHeader'
            startCnt+=1
            self.collectDecorations=True

        if listDict:
            self.inHeader=False
            self.start='pureList'
            startCnt+=1
            self.dictStack=[]
            self.collectDecorations=True

        if listDictWithHeader:
            self.start='pureListWithHeader'
            startCnt+=1

        if boundaryDict:
            self.start='boundaryDict'
            startCnt+=1

        if startCnt>1:
            error("Only one start symbol can be specified.",startCnt,"are specified")

        PlyParser.__init__(self,debug=debug)

        #sys.setrecursionlimit(50000)
        #print sys.getrecursionlimit()

        self.emptyCnt=0

        self.header,self.data=self.parse(content)

    def __contains__(self,key):
        return key in self.data

    def __getitem__(self,key):
        return self.data[key]

    def __setitem__(self,key,value):
        self.data[key]=value

    def __delitem__(self,key):
        del self.data[key]

    def __iter__(self):
        for key in self.data:
            yield key

    ## def __len__(self):
    ##     if self.data==None:
    ##         return 0
    ##     else:
    ##         return len(self.data)

    def resetDecoration(self):
        self._decorationBuffer=""

    def addToDecoration(self,text):
        if self.collectDecorations:
            self._decorationBuffer+=text

    def addCommentToDecorations(self,text):
        if self.preserveComments:
            self.addToDecoration(text)

    def addNewlinesToDecorations(self,text):
        if self.preserveNewLines:
            self.addToDecoration(text)

    def getDecoration(self):
        tmp=self._decorationBuffer
        self.resetDecoration()
        if len(tmp)>0:
            if tmp[-1]=='\n':
                tmp=tmp[:-1]
        return tmp

    def directory(self):
        if self.fName==None:
            return path.curdir
        else:
            return path.dirname(self.fName)

    def getData(self):
        """Get the data structure"""
        return self.data

    def getHeader(self):
        """Get the OpenFOAM-header"""
        return self.header

    def printContext(self,c,ind):
        """Prints the context of the current index"""
        print_("------")
        print_(c[max(0,ind-100):max(0,ind-1)])
        print_("------")
        print_(">",c[ind-1],"<")
        print_("------")
        print_(c[min(len(c),ind):min(len(c),ind+100)])
        print_("------")

    def parserError(self,text,c,ind):
        """Prints the error message of the parser and exits"""
        print_("PARSER ERROR:",text)
        print_("On index",ind)
        self.printContext(c,ind)
        raise PyFoamParserError("Unspecified")

    def condenseAllPreFixLists(self,orig):
        """Checks whether this list is a list that consists only of prefix-lists"""
        isAllPreList=False
        if (len(orig) % 2)==0:
            isAllPreList=True
            for i in range(0,len(orig),2):
                if type(orig[i])==int and (type(orig[i+1]) in [list,Vector,Tensor,SymmTensor]):
                    if len(orig[i+1])!=orig[i]:
                        isAllPreList=False
                        break
                else:
                    isAllPreList=False
                    break

        if isAllPreList:
            return orig[1::2]
        else:
            return orig

    tokens = (
        'NAME',
        'ICONST',
        'FCONST',
        'SCONST',
        'FOAMFILE',
        'UNIFORM',
        'NONUNIFORM',
        'UNPARSEDCHUNK',
        'CODESTREAMCHUNK',
        'REACTION',
        'SUBSTITUTION',
        'MERGE',
        'OVERWRITE',
        'ERROR',
        'WARN',
        'PROTECT',
        'DEFAULT',
        'INCLUDE',
        'INCLUDEIFPRESENT',
        'REMOVE',
        'INPUTMODE',
        'KANALGITTER',
        'CODESTART',
        'CODEEND',
        'BINARYBLOB',
    )

    reserved = {
        'FoamFile'        : 'FOAMFILE',
        'uniform'         : 'UNIFORM',
        'nonuniform'      : 'NONUNIFORM',
        'include'         : 'INCLUDE',
        'includeIfPresent': 'INCLUDEIFPRESENT',
        'remove'          : 'REMOVE',
        'inputMode'       : 'INPUTMODE',
        'merge'           : 'MERGE',
        'overwrite'       : 'OVERWRITE',
        'error'           : 'ERROR',
        'warn'            : 'WARN',
        'protect'         : 'PROTECT',
        'default'         : 'DEFAULT',
    }

    states = (
        ('unparsed', 'exclusive'),
        ('codestream', 'exclusive'),
        ('mlcomment', 'exclusive'),
        ('binaryblob', 'exclusive'),
    )

    def t_unparsed_left(self,t):
        r'\('
        t.lexer.level+=1
        # print "left",t.lexer.level,

    def t_unparsed_right(self,t):
        r'\)'
        t.lexer.level-=1
        # print "right",t.lexer.level,
        if t.lexer.level < 0:
            t.value = t.lexer.lexdata[t.lexer.code_start:t.lexer.lexpos-1]
            # print t.value
            t.lexer.lexpos-=1
            t.type = "UNPARSEDCHUNK"
            t.lexer.lineno += t.value.count('\n')
            t.lexer.begin('INITIAL')
            return t

    t_unparsed_ignore = ' \t\n0123456789.-+e'

    def t_unparsed_error(self,t):
        print_("Error",t.lexer.lexdata[t.lexer.lexpos])
        t.lexer.skip(1)

    t_binaryblob_ignore = ''

    def t_binaryblob_close(self,t):
        r"\)"
        size=t.lexer.lexpos-t.lexer.binary_start-1
        # print size,ord(t.lexer.lexdata[t.lexer.lexpos-1]),ord(t.lexer.lexdata[t.lexer.lexpos]),ord(t.lexer.lexdata[t.lexer.lexpos+1])
        # print size,ord(t.lexer.lexdata[t.lexer.binary_start-1]),ord(t.lexer.lexdata[t.lexer.binary_start])
        # print size % (t.lexer.binary_listlen), len(t.lexer.lexdata)
        if (size % t.lexer.binary_listlen)==0:
            # length of blob is multiple of the listlength
            nextChar=t.lexer.lexdata[t.lexer.lexpos]
            nextNextChar=t.lexer.lexdata[t.lexer.lexpos+1]
            if (nextChar in [';','\n'] and nextNextChar=='\n'):
                t.value = t.lexer.lexdata[t.lexer.binary_start:t.lexer.lexpos-1]
                assert(len(t.value)%t.lexer.binary_listlen == 0)
                t.lexer.lexpos-=1
                t.type = "BINARYBLOB"
                t.lexer.lineno += t.value.count('\n')
                t.lexer.begin('INITIAL')
                self.inBinary=False
                return t

    def t_binaryblob_throwaway(self,t):
        r'[^\)]'
        pass

    def t_binaryblob_error(self,t):
        print_("Error",t.lexer.lexdata[t.lexer.lexpos])
        t.lexer.skip(1)

    def t_codestream_end(self,t):
        r"\#\}"
        t.value = t.lexer.lexdata[t.lexer.code_start:t.lexer.lexpos-2]
        t.lexer.lexpos-=2
        t.type = "CODESTREAMCHUNK"
        t.lexer.lineno += t.value.count('\n')
        t.lexer.begin('INITIAL')
        return t

    t_codestream_ignore = ''

    def t_codestream_throwaway(self,t):
        r'[^#]'
        pass

    def t_codestream_error(self,t):
        print_("Error",t.lexer.lexdata[t.lexer.lexpos])
        t.lexer.skip(1)

    def t_NAME(self,t):
        r'[a-zA-Z_][+\-<>(),.\*|a-zA-Z_0-9&%:]*'
        t.type=self.reserved.get(t.value,'NAME')
        if t.value[-1]==")":
            if t.value.count(")")>t.value.count("("):
                # Give back the last ) because it probably belongs to a list
                t.value=t.value[:-1]
                t.lexer.lexpos-=1

        return t

    def t_SUBSTITUTION(self,t):
        r'\$[a-zA-Z_][+\-<>(),.\*|a-zA-Z_0-9&%:]*'
        t.type=self.reserved.get(t.value,'SUBSTITUTION')
        if t.value[-1]==")":
            if t.value.count(")")>t.value.count("("):
                # Give back the last ) because it probably belongs to a list
                t.value=t.value[:-1]
                t.lexer.lexpos-=1

        return t

    t_CODESTART = r'\#\{'

    t_CODEEND = r'\#\}'

    t_KANALGITTER = r'\#'

    t_ICONST = r'(-|)\d+([uU]|[lL]|[uU][lL]|[lL][uU])?'

    t_FCONST = r'(-|)((\d+)(\.\d*)(e(\+|-)?(\d+))? | (\d+)e(\+|-)?(\d+))([lL]|[fF])?'

    t_SCONST = r'\"([^\\\n]|(\\.))*?\"'

    literals = "(){};[]"

    t_ignore=" \t\r"

    # Define a rule so we can track line numbers
    def t_newline(self,t):
        r'\n+'
        t.lexer.lineno += len(t.value)
        now=t.lexer.lexpos
        next=t.lexer.lexdata.find('\n',now)
        if next>=0:
            line=t.lexer.lexdata[now:next]
            pos=line.find("=")

            if pos>=0 and not self.binaryMode:
                if ((line.find("//")>=0 and line.find("//")<pos)) or (line.find("/*")>=0 and line.find("/*")<pos) or (line.find('"')>=0 and line.find('"')<pos):
                    return
                t.value = line
                t.type = "REACTION"
                t.lexer.lineno += 1
                t.lexer.lexpos = next
                return t
        # self.addNewlinesToDecorations(t.value)

    # C++ comment (ignore)
    def t_ccode_comment(self,t):
        r'//.*'
        t.lexer.lineno += t.value.count('\n')
        self.addCommentToDecorations(t.value)

    def t_startmlcomment(self,t):
        r'/\*'
        t.lexer.begin('mlcomment')
        self.mllevel=1
        self.mlcomment_start = t.lexer.lexpos-2

    def t_mlcomment_newlevel(self,t):
        r'/\*'
        self.mllevel+=1

    def t_mlcomment_endcomment(self,t):
        r'\*/'
        self.mllevel-=1
        if self.mllevel<=0:
            t.lexer.begin('INITIAL')
            mlcomment=t.lexer.lexdata[self.mlcomment_start:t.lexer.lexpos]
            t.lexer.lineno += mlcomment.count('\n')
            self.addCommentToDecorations(mlcomment)

    def t_mlcomment_throwaway(self,t):
        r'[^\*]'
        pass

    t_mlcomment_ignore = ''

    def t_mlcomment_error(self,t):
        if t.lexer.lexdata[t.lexer.lexpos]!="*":
            print_("Error",t.lexer.lexdata[t.lexer.lexpos])
        t.lexer.skip(1)

    # Error handling rule
    def t_error(self,t):
        msg="Illegal character '%s' in line %d (pos: %d)" % (
            t.value[0],
            t.lexer.lineno,
            t.lexer.lexpos)
        raise PyFoamParserError(msg)
        # t.lexer.skip(1)  # the old days when illegal characters were accepted

    def p_global(self,p):
        'global : header dictbody'
        p[0] = ( p[1] , p[2] )

    def p_gotHeader(self,p):
        'gotHeader :'
        p.lexer.lexpos=len(p.lexer.lexdata)
        self.inHeader=False

    def p_noBody(self,p):
        ''' noBody : FOAMFILE '{' dictbody gotHeader '}' '''
        p[0] = ( p[3] , {} )

    def p_noHeader(self,p):
        'noHeader : dictbody'
        p[0] = ( None , p[1] )

    def p_pureList(self,p):
        'pureList : list'
        p[0] = ( None , p[1] )

    def p_onlyListOrPList(self,p):
        '''onlyListOrPList : list
                           | prelist '''
        p[0]=p[1]

    def p_pureListWithHeader(self,p):
        '''pureListWithHeader : header onlyListOrPList'''
        p[0] = ( p[1] , p[2] )

    def p_afterHeader(self,p):
        'afterHeader :'
        pass

    def p_boundaryDict(self,p):
        '''boundaryDict : header list
                        | header prelist '''
        # p[0] = ( p[1] , dict(zip(p[2][::2],p[2][1::2])) )
        p[0] = ( p[1] , p[2] )

    def p_header(self,p):
        'header : FOAMFILE dictionary'
        self.inHeader=False
        p[0] = p[2]

        # if p[0]["format"]=="binary":
        #     if not self.treatBinaryAsASCII:
        #         self.binaryMode=True
        #     else:
        #         self.binaryMode=False
        # elif p[0]["format"]=="ascii":
        #     self.binaryMode=False
        # else:
        #     raise FatalErrorPyFoamException("Don't know how to parse file format",p[0]["format"])

        self.collectDecorations=True

    def p_macro(self,p):
        '''macro : KANALGITTER include
                 | KANALGITTER inputMode
                 | KANALGITTER remove'''
        p[0] = p[1]+p[2]+"\n"
        if self.doMacros:
            p[0]="// "+p[0]

    def p_include(self,p):
        '''include : INCLUDE SCONST
                   | INCLUDEIFPRESENT SCONST'''
        if self.doMacros:
            fName=path.join(self.directory(),p[2][1:-1])
            read=True
            if p[1]=="includeIfPresent" and not path.exists(fName):
                read=False
            if read and not path.exists(fName):
                raise PyFoamParserError("The included file "+fName+" does not exist")
            if read:
                data=ParsedParameterFile(fName,
                                         noHeader=True,
                                         dictStack=self.dictStack,
                                         doMacroExpansion=self.doMacros)
                into=self.dictStack[-1]
                for k in data:
                    into[k]=data[k]

        p[0] = p[1] + " " + p[2]

    def p_inputMode(self,p):
        '''inputMode : INPUTMODE ERROR
                     | INPUTMODE WARN
                     | INPUTMODE PROTECT
                     | INPUTMODE DEFAULT
                     | INPUTMODE MERGE
                     | INPUTMODE OVERWRITE'''
        p[0] = p[1] + " " + p[2]
        self.inputMode=getattr(inputModes,p[2])

    def p_remove(self,p):
        '''remove : REMOVE word
                  | REMOVE wlist'''
        p[0] = p[1] + " "
        if type(p[2])==str:
            p[0]+=p[2]
        else:
            p[0]+="( "
            for w in p[2]:
                p[0]+=w+" "
            p[0]+=")"

    def p_integer(self,p):
        '''integer : ICONST'''
        p[0] = int(p[1])

    def p_float(self,p):
        '''integer : FCONST'''
        p[0] = float(p[1])

    def p_enter_dict(self,p):
        '''enter_dict :'''
        self.dictStack.append(DictProxy())

    def p_exit_dict(self,p):
        '''exit_dict :'''
        p[0]=self.dictStack.pop()

    def p_dictionary(self,p):
        '''dictionary : '{' enter_dict dictbody '}' exit_dict
                      | '{' '}' '''
        if len(p)==6:
            p[0] = p[5]
        else:
            p[0] = DictProxy()

    def p_dictbody(self,p):
        '''dictbody : dictbody dictline
                    | dictline
                    | empty'''

        if len(p)==3:
            p[0]=p[1]
            if self.duplicateCheck:
                if p[2][0] in p[0]:
                    if self.duplicateFail:
                        error("Key",p[2][0],"already defined")
                    else:
                        warning("Key",p[2][0],"already defined")
            if type(p[2][0])==DictRedirection and p[2][1]=='':
                p[0].addRedirection(p[2][0])
            else:
                if type(p[2][1])==DictRedirection:
                    p[0][p[2][0]]=p[2][1].getContent()
                else:
                    p[0][p[2][0]]=p[2][1]
                p[0].addDecoration(p[2][0],self.getDecoration())
        else:
            p[0]=self.dictStack[-1]

            if p[1]:
                if type(p[1][0])==DictRedirection and p[1][1]=='':
                    p[0].addRedirection(p[1][0])
                else:
                    if type(p[1][1])==DictRedirection:
                        p[0][p[1][0]]=p[1][1].getContent()
                    else:
                        p[0][p[1][0]]=p[1][1]

    def p_list(self,p):
        '''list : '(' itemlist ')' '''
        p[0] = self.condenseAllPreFixLists(p[2])
        if not self.noVectorOrTensor and (
                len(p[2])==3 or len(p[2])==9 or len(p[2])==6):
            isVector=True
            for i in p[2]:
                try:
                    float(i)
                except:
                    isVector=False
            if isVector:
                if len(p[2])==3:
                    p[0]=Vector(*p[2])
                elif len(p[2])==9:
                    p[0]=Tensor(*p[2])
                else:
                    p[0]=SymmTensor(*p[2])

    def p_wlist(self,p):
        '''wlist : '(' wordlist ')' '''
        p[0] = p[2]

    def p_unparsed(self,p):
        '''unparsed : UNPARSEDCHUNK'''
        p[0] = Unparsed(p[1])

    def p_binaryblob(self,p):
        '''binaryblob : BINARYBLOB'''
        p[0] = BinaryBlob(p[1])

    def p_prelist_seen(self,p):
        '''prelist_seen : '''
        if self.binaryMode:
            p.lexer.begin('binaryblob')
            p.lexer.binary_start = p.lexer.lexpos
            p.lexer.binary_listlen = p[-1]
            self.inBinary=True
        elif self.listLengthUnparsed!=None:
            if int(p[-1])>=self.listLengthUnparsed:
                p.lexer.begin('unparsed')
                p.lexer.level=0
                p.lexer.code_start = p.lexer.lexpos

    def p_codestream(self,p):
        '''codestream : codeSeen CODESTART CODESTREAMCHUNK CODEEND '''
        p[0] = Codestream(p[3])

    def p_codeSeen(self,p):
        '''codeSeen : '''
        p.lexer.begin('codestream')
        p.lexer.level=0
        p.lexer.code_start = p.lexer.lexpos

    def p_prelist(self,p):
        '''prelist : integer prelist_seen '(' itemlist ')'
                   | integer prelist_seen '(' binaryblob ')'
                   | integer prelist_seen '(' unparsed ')' '''
        if type(p[4])==Unparsed:
            p[0] = UnparsedList(int(p[1]),p[4].data)
        elif type(p[4])==BinaryBlob:
            p[0] = BinaryList(int(p[1]),p[4].data)
        else:
            p[0] = self.condenseAllPreFixLists(p[4])

    def p_itemlist(self,p):
        '''itemlist : itemlist item
                    | itemlist ';'
                    | item '''
        if len(p)==2:
            if p[1]==None:
                p[0]=[]
            else:
                p[0]=[ p[1] ]
        else:
            p[0]=p[1]
            if p[2]!=';':
                p[0].append(p[2])

    def p_wordlist(self,p):
        '''wordlist : wordlist word
                    | word '''
        if len(p)==2:
            if p[1]==None:
                p[0]=[]
            else:
                p[0]=[ p[1] ]
        else:
            p[0]=p[1]
            p[0].append(p[2])

    def p_word(self,p):
        '''word : NAME
                | UNIFORM
                | NONUNIFORM
                | MERGE
                | OVERWRITE
                | DEFAULT
                | WARN
                | PROTECT
                | ERROR'''
        p[0]=p[1]

    def p_substitution(self,p):
        '''substitution : SUBSTITUTION'''
        if self.doMacros:
            nm=p[1][1:]
            p[0]="<Symbol '"+nm+"' not found>"
            for di in reversed(self.dictStack):
                if nm in di:
                    p[0]=DictRedirection(deepcopy(di[nm]),
                                         di[nm],
                                         nm)
                    return
        else:
            p[0]=p[1]

    def p_dictkey(self,p):
        '''dictkey : word
                   | SCONST'''
        p[0]=p[1]

    def p_dictline(self,p):
        '''dictline : dictkey dictitem ';'
                    | dictkey list ';'
                    | dictkey prelist ';'
                    | dictkey fieldvalue ';'
                    | macro
                    | substitution ';'
                    | dictkey codestream ';'
                    | dictkey dictionary'''
        if len(p)==4 and self.inHeader and p[1]=="format" and type(p[2])==str:
            if p[2]=="binary":
                if not self.treatBinaryAsASCII:
                    self.binaryMode=True
                else:
                    self.binaryMode=False
            elif p[2]=="ascii":
                self.binaryMode=False
            else:
                raise FatalErrorPyFoamException("Don't know how to parse file format",p[2])

        if len(p)==4 and type(p[2])==list:
            # remove the prefix from long lists (if present)
            doAgain=True
            tmp=p[2]
            while doAgain:
                doAgain=False
                for i in range(len(tmp)-1):
                    if type(tmp[i])==int and type(tmp[i+1]) in [list]:
                        if tmp[i]==len(tmp[i+1]):
                            nix=tmp[:i]+tmp[i+1:]
                            for i in range(len(tmp)):
                                tmp.pop()
                            tmp.extend(nix)
                            doAgain=True
                            break
        if len(p)==4:
            p[0] = ( p[1] , p[2] )
        elif len(p)==3:
            if p[2]==';':
                p[0]= (p[1],'')
            else:
                p[0] = ( p[1] , p[2] )
        else:
            p[0] = ( self.emptyCnt , p[1] )
            self.emptyCnt+=1

    def p_number(self,p):
        '''number : integer
                  | FCONST'''
        p[0] = p[1]

    def p_dimension(self,p):
        '''dimension : '[' number number number number number number number ']'
                     | '[' number number number number number ']' '''
        result=p[2:-1]
        if len(result)==5:
            result+=[0,0]

        p[0]=Dimension(*result)

    def p_vector(self,p):
        '''vector : '(' number number number ')' '''
        if self.noVectorOrTensor:
            p[0]=p[2:5]
        else:
            p[0]=Vector(*p[2:5])

    def p_tensor(self,p):
        '''tensor : '(' number number number number number number number number number ')' '''
        if self.noVectorOrTensor:
            p[0]=p[2:11]
        else:
            p[0]=Tensor(*p[2:11])

    def p_symmtensor(self,p):
        '''symmtensor : '(' number number number number number number ')' '''
        if self.noVectorOrTensor:
            p[0]=p[2:8]
        else:
            p[0]=SymmTensor(*p[2:8])

    def p_fieldvalue_uniform(self,p):
        '''fieldvalue : UNIFORM number
                      | UNIFORM vector
                      | UNIFORM tensor
                      | UNIFORM symmtensor'''
        p[0] = Field(p[2])

    def p_fieldvalue_nonuniform(self,p):
        '''fieldvalue : NONUNIFORM NAME list
                      | NONUNIFORM prelist
                      | NONUNIFORM NAME prelist'''
        if len(p)==4:
            p[0] = Field(p[3],name=p[2])
        else:
            p[0] = Field(p[2])

    def p_dictitem(self,p):
        '''dictitem : longitem
                    | pitem'''
        if type(p[1])==tuple:
            if len(p[1])==2 and p[1][0]=="uniform":
                p[0]=Field(p[1][1])
            elif len(p[1])==3 and p[1][0]=="nonuniform":
                p[0]=Field(p[1][2],name=p[1][1])
            else:
                p[0]=TupleProxy(p[1])
        else:
            p[0] = p[1]

    def p_longitem(self,p):
        '''longitem : pitemlist pitem'''
        p[0] = p[1]+(p[2],)

    def p_pitemlist(self,p):
        '''pitemlist : pitemlist pitem
                     | pitem '''
        if len(p)==2:
            p[0]=(p[1],)
        else:
            ## print type(p[1][-1])
            ## if type(p[1][-1])==int and type(p[2])==tuple:
            ##     print "Hepp",p[2]
            p[0]=p[1]+(p[2],)

    def p_pitem(self,p):
        '''pitem : word
                 | SCONST
                 | number
                 | dictionary
                 | list
                 | dimension
                 | substitution
                 | empty'''
        p[0] = p[1]

    def p_item(self,p):
        '''item : pitem
                | REACTION
                | list
                | dictionary'''
        p[0] = p[1]

    def p_empty(self,p):
        'empty :'
        pass

    def p_error(self,p):
        if self.inBinary:
            raise BinaryParserError("Problem reading binary",p)  # .type, p.lineno
        else:
            raise PyFoamParserError("Syntax error at token",p)  # .type, p.lineno
        # Just discard the token and tell the parser it's okay.
        # self.yacc.errok()

class PyFoamParserError(FatalErrorPyFoamException):
    def __init__(self,descr,data=None):
        FatalErrorPyFoamException.__init__(self,"Parser Error:",descr)
        self.descr=descr
        self.data=data

    def __str__(self):
        result="Error in PyFoamParser: '"+self.descr+"'"
        if self.data!=None:
            val=self.data.value
            if len(val)>100:
                val=val[:40]+" .... "+val[-40:]

            result+=" @ %r (Type: %s ) in line %d at position %d" % (val,
                                                                     self.data.type,
                                                                     self.data.lineno,
                                                                     self.data.lexpos)
        else:
            result+=" NONE"

        return result

    def __repr__(self):
        return str(self)

class BinaryParserError(PyFoamParserError):
    def __init__(self,descr,data=None):
        PyFoamParserError.__init__(self,descr,data)

class FoamStringParser(FoamFileParser):
    """Convenience class that parses only a headerless OpenFOAM dictionary"""

    def __init__(self,
                 content,
                 debug=False,
                 noVectorOrTensor=False,
                 duplicateCheck=False,
                 listDict=False,
                 doMacroExpansion=False,
                 duplicateFail=False):
        """@param content: the string to be parsed
        @param debug: output debug information during parsing"""

        FoamFileParser.__init__(self,
                                content,
                                debug=debug,
                                noHeader=not listDict,
                                boundaryDict=False,
                                listDict=listDict,
                                noVectorOrTensor=noVectorOrTensor,
                                duplicateCheck=duplicateCheck,
                                doMacroExpansion=doMacroExpansion,
                                duplicateFail=duplicateFail)

    def __str__(self):
        return str(FoamFileGenerator(self.data))
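
# Usage sketch (illustrative, not part of the original source): parse a
# headerless dictionary snippet from a string.
#
#     from PyFoam.RunDictionary.ParsedParameterFile import FoamStringParser
#
#     p = FoamStringParser("a 1; b (1 2 3); c { d uniform 42; }")
#     p["a"]           # -> 1
#     p["b"]           # -> Vector(1,2,3) unless noVectorOrTensor is set
#     p["c"]["d"]      # -> Field(42)
#     str(p)           # re-serialized via FoamFileGenerator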

class ParsedBoundaryDict(ParsedParameterFile):
    """Convenience class that parses only an OpenFOAM polyMesh-boundaries file"""

    def __init__(self,
                 name,
                 treatBinaryAsASCII=False,
                 backup=False,
                 debug=False):
        """@param name: The name of the parameter file
        @param backup: create a backup-copy of the file"""

        ParsedParameterFile.__init__(self,
                                     name,
                                     backup=backup,
                                     treatBinaryAsASCII=treatBinaryAsASCII,
                                     debug=debug,
                                     boundaryDict=True)

    def parse(self,content):
        """Constructs a representation of the file"""
        temp=ParsedParameterFile.parse(self,content)
        self.content=DictProxy()
        for i in range(0,len(temp),2):
            self.content[temp[i]]=temp[i+1]
        return self.content

    def __str__(self):
        string="// File generated by PyFoam - sorry for the ugliness\n\n"
        temp=[]
        for k,v in iteritems(self.content):
            temp.append((k,v))

        # sort patches by startFace (key-based so that it also works with
        # Python 3, where list.sort() no longer accepts a comparison function)
        temp.sort(key=lambda x:int(x[1]["startFace"]))

        temp2=[]

        for b in temp:
            temp2.append(b[0])
            temp2.append(b[1])

        generator=FoamFileGenerator(temp2,header=self.header)
        string+=str(generator)

        return string
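
# Usage sketch (illustrative, not part of the original source): read a
# polyMesh/boundary file; the result maps patch names to their dictionaries.
# The path is a placeholder.
#
#     from PyFoam.RunDictionary.ParsedParameterFile import ParsedBoundaryDict
#
#     bnd = ParsedBoundaryDict("constant/polyMesh/boundary")
#     for patch in bnd.content:
#         print(patch, bnd[patch]["type"], bnd[patch]["nFaces"])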

class ParsedFileHeader(ParsedParameterFile):
    """Only parse the header of a file"""

    def __init__(self,name):
        ParsedParameterFile.__init__(self,name,backup=False,noBody=True)

    def __getitem__(self,name):
        return self.header[name]

    def __contains__(self,name):
        return name in self.header

    def __len__(self):
        return len(self.header)

# Should work with Python3 and Python2
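
# Usage sketch (illustrative, not part of the original source): inspect the
# FoamFile header of a field file without parsing its body.  The path is a
# placeholder.
#
#     from PyFoam.RunDictionary.ParsedParameterFile import ParsedFileHeader
#
#     hdr = ParsedFileHeader("0/U")
#     hdr["class"]       # e.g. "volVectorField"
#     "format" in hdr    # True for a well-formed OpenFOAM file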