
Source Code for Module PyFoam.RunDictionary.ParsedParameterFile

#  ICE Revision: $Id: /local/openfoam/Python/PyFoam/PyFoam/RunDictionary/ParsedParameterFile.py 7397 2011-04-03T18:35:06.691206Z bgschaid  $
"""Parameter file is read into memory and modified there"""

from FileBasis import FileBasisBackup
from PyFoam.Basics.PlyParser import PlyParser
from PyFoam.Basics.FoamFileGenerator import FoamFileGenerator

from PyFoam.Basics.DataStructures import Vector,Field,Dimension,DictProxy,TupleProxy,Tensor,SymmTensor,Unparsed,UnparsedList

from PyFoam.Error import error,warning,FatalErrorPyFoamException

from os import path
from copy import deepcopy

class ParsedParameterFile(FileBasisBackup):
    """Parameter file whose complete representation is read into
    memory, can be manipulated and afterwards written to disk"""

    def __init__(self,
                 name,
                 backup=False,
                 debug=False,
                 boundaryDict=False,
                 listDict=False,
                 listDictWithHeader=False,
                 listLengthUnparsed=None,
                 noHeader=False,
                 binaryMode=False,
                 noBody=False,
                 doMacroExpansion=False,
                 dontRead=False,
                 createZipped=True):
        """@param name: The name of the parameter file
        @param backup: create a backup-copy of the file
        @param boundaryDict: the file to parse is a boundary file
        @param listDict: the file only contains a list
        @param listDictWithHeader: the file only contains a list and a header
        @param listLengthUnparsed: Lists longer than that length are not parsed
        @param binaryMode: Parse long lists in binary mode (to be overridden by
        the settings in the header)
        @param noHeader: don't expect a header
        @param noBody: don't read the body of the file (only the header)
        @param doMacroExpansion: expand #include and $var
        @param dontRead: Do not read the file during construction
        """

        self.noHeader=noHeader
        self.noBody=noBody
        FileBasisBackup.__init__(self,
                                 name,
                                 backup=backup,
                                 createZipped=createZipped)
        self.debug=debug
        self.boundaryDict=boundaryDict
        self.listDict=listDict
        self.listDictWithHeader=listDictWithHeader
        self.listLengthUnparsed=listLengthUnparsed
        self.doMacros=doMacroExpansion

        self.header=None
        self.content=None

        self.binaryMode=binaryMode

        if not dontRead:
            self.readFile()

    def parse(self,content):
        """Constructs a representation of the file"""
        parser=FoamFileParser(content,
                              debug=self.debug,
                              fName=self.name,
                              boundaryDict=self.boundaryDict,
                              listDict=self.listDict,
                              listDictWithHeader=self.listDictWithHeader,
                              listLengthUnparsed=self.listLengthUnparsed,
                              noHeader=self.noHeader,
                              noBody=self.noBody,
                              binaryMode=self.binaryMode,
                              doMacroExpansion=self.doMacros)

        self.content=parser.getData()
        self.header=parser.getHeader()
        return self.content

    def __contains__(self,key):
        return key in self.content

    def __getitem__(self,key):
        return self.content[key]

    def __setitem__(self,key,value):
        self.content[key]=value

    def __delitem__(self,key):
        del self.content[key]

    def __len__(self):
        return len(self.content)

    def __iter__(self):
        for key in self.content:
            yield key

    def __str__(self):
        """Generates a string from the contents in memory
        Used to be called makeString"""

        string="// -*- C++ -*-\n// File generated by PyFoam - sorry for the ugliness\n\n"

        generator=FoamFileGenerator(self.content,header=self.header)
        string+=generator.makeString(firstLevel=True)

        return string

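# Usage sketch (hypothetical, not part of the original source): read an existing
# dictionary, change an entry and write it back. It assumes an OpenFOAM case
# directory "caseDir" with a standard system/controlDict; writeFile() is
# inherited from the FileBasis base class.
#
#   ctrl=ParsedParameterFile(path.join("caseDir","system","controlDict"))
#   ctrl["endTime"]=1000               # entries behave like dictionary items
#   ctrl["writeInterval"]=100
#   ctrl.writeFile()                   # write the modified representation back to disk
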
class WriteParameterFile(ParsedParameterFile):
    """A specialization that is used to only write to the file"""
    def __init__(self,
                 name,
                 backup=False,
                 className="dictionary",
                 objectName=None,
                 createZipped=False):
        ParsedParameterFile.__init__(self,
                                     name,
                                     backup=backup,
                                     dontRead=True,
                                     createZipped=createZipped)

        if objectName==None:
            objectName=path.basename(name)

        self.content={}
        self.header={"version":"2.0",
                     "format":"ascii",
                     "class":className,
                     "object":objectName}

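# Usage sketch (hypothetical, not part of the original source): WriteParameterFile
# never reads the target, so it is suited for generating a dictionary from scratch.
#
#   out=WriteParameterFile(path.join("caseDir","system","myDict"),className="dictionary")
#   out["solver"]="PCG"
#   out["relTol"]=0.01
#   out.writeFile()                    # header and content are generated by PyFoam
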
class Enumerate(object):
    def __init__(self, names):
        for number, name in enumerate(names):
            setattr(self, name, number)

inputModes=Enumerate(["merge","error","warn","protect","overwrite","default"])

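# Enumerate simply maps each name to a consecutive integer attribute, so
# inputModes.merge==0, inputModes.error==1, ..., inputModes.default==5.
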
class FoamFileParser(PlyParser):
    """Class that parses a string that contains the contents of an
    OpenFOAM-file and builds a nested structure of dictionaries and
    lists from it"""

    def __init__(self,
                 content,
                 fName=None,
                 debug=False,
                 noHeader=False,
                 noBody=False,
                 doMacroExpansion=False,
                 boundaryDict=False,
                 preserveComments=True,
                 preserveNewlines=True,
                 listDict=False,
                 listDictWithHeader=False,
                 listLengthUnparsed=None,
                 binaryMode=False,
                 duplicateCheck=False,
                 duplicateFail=True):
        """@param content: the string to be parsed
        @param fName: Name of the actual file (if any)
        @param debug: output debug information during parsing
        @param noHeader: switch that turns off the parsing of the header
        @param duplicateCheck: Check for duplicates in dictionaries
        @param duplicateFail: Fail if a duplicate is discovered"""

        self.binaryMode=binaryMode
        self.fName=fName
        self.data=None
        self.header=None
        self.debug=debug
        self.listLengthUnparsed=listLengthUnparsed
        self.doMacros=doMacroExpansion
        self.preserveComments=preserveComments
        self.preserveNewLines=preserveNewlines
        self.duplicateCheck=duplicateCheck
        self.duplicateFail=duplicateFail

        self.collectDecorations=False
        self.inputMode=inputModes.merge

        self._decorationBuffer=""

        startCnt=0

        self.dictStack=[DictProxy()]

        if noBody:
            self.start='noBody'
            startCnt+=1

        if noHeader:
            self.start='noHeader'
            startCnt+=1

        if listDict:
            self.start='pureList'
            startCnt+=1
            self.dictStack=[]

        if listDictWithHeader:
            self.start='pureListWithHeader'
            startCnt+=1

        if boundaryDict:
            self.start='boundaryDict'
            startCnt+=1

        if startCnt>1:
            error("Only one start symbol can be specified.",startCnt,"are specified")

        PlyParser.__init__(self,debug=debug)

        #sys.setrecursionlimit(50000)
        #print sys.getrecursionlimit()

        self.emptyCnt=0

        self.header,self.data=self.parse(content)

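# Usage sketch (hypothetical, not part of the original source): FoamFileParser is
# normally driven by ParsedParameterFile, but it can also be handed file content
# directly as a string.
#
#   content=open(path.join("caseDir","system","fvSchemes")).read()
#   parser=FoamFileParser(content,fName=path.join("caseDir","system","fvSchemes"))
#   schemes=parser.getData()           # nested dict-like structure of the body
#   info=parser.getHeader()            # the FoamFile header as a dictionary
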
    def __contains__(self,key):
        return key in self.data

    def __getitem__(self,key):
        return self.data[key]

    def __setitem__(self,key,value):
        self.data[key]=value

    def __delitem__(self,key):
        del self.data[key]

    def __iter__(self):
        for key in self.data:
            yield key

##    def __len__(self):
##        if self.data==None:
##            return 0
##        else:
##            return len(self.data)

    def resetDecoration(self):
        self._decorationBuffer=""

    def addToDecoration(self,text):
        if self.collectDecorations:
            self._decorationBuffer+=text

    def addCommentToDecorations(self,text):
        if self.preserveComments:
            self.addToDecoration(text)

    def addNewlinesToDecorations(self,text):
        if self.preserveNewLines:
            self.addToDecoration(text)

    def getDecoration(self):
        tmp=self._decorationBuffer
        self.resetDecoration()
        if len(tmp)>0:
            if tmp[-1]=='\n':
                tmp=tmp[:-1]
        return tmp

    def directory(self):
        if self.fName==None:
            return path.curdir
        else:
            return path.dirname(self.fName)

    def getData(self):
        """Get the data structure"""
        return self.data

    def getHeader(self):
        """Get the OpenFOAM-header"""
        return self.header

    def printContext(self,c,ind):
        """Prints the context of the current index"""
        print "------"
        print c[max(0,ind-100):max(0,ind-1)]
        print "------"
        print ">",c[ind-1],"<"
        print "------"
        print c[min(len(c),ind):min(len(c),ind+100)]
        print "------"

    def parserError(self,text,c,ind):
        """Prints the error message of the parser and exits"""
        print "PARSER ERROR:",text
        print "On index",ind
        self.printContext(c,ind)
        raise PyFoamParserError("Unspecified")

    def condenseAllPreFixLists(self,orig):
        """Checks whether this list is a list that consists only of prefix-lists"""
        isAllPreList=False
        if (len(orig) % 2)==0:
            isAllPreList=True
            for i in range(0,len(orig),2):
                if type(orig[i])==int and (type(orig[i+1]) in [list,Vector,Tensor,SymmTensor]):
                    if len(orig[i+1])!=orig[i]:
                        isAllPreList=False
                        break
                else:
                    isAllPreList=False
                    break

        if isAllPreList:
            return orig[1::2]
        else:
            return orig
    tokens = (
        'NAME',
        'ICONST',
        'FCONST',
        'SCONST',
        'FOAMFILE',
        'UNIFORM',
        'NONUNIFORM',
        'UNPARSEDCHUNK',
        'REACTION',
        'SUBSTITUTION',
        'MERGE',
        'OVERWRITE',
        'ERROR',
        'WARN',
        'PROTECT',
        'DEFAULT',
        'INCLUDE',
        'INCLUDEIFPRESENT',
        'REMOVE',
        'INPUTMODE',
        'KANALGITTER',
    )

    reserved = {
        'FoamFile'        : 'FOAMFILE',
        'uniform'         : 'UNIFORM',
        'nonuniform'      : 'NONUNIFORM',
        'include'         : 'INCLUDE',
        'includeIfPresent': 'INCLUDEIFPRESENT',
        'remove'          : 'REMOVE',
        'inputMode'       : 'INPUTMODE',
        'merge'           : 'MERGE',
        'overwrite'       : 'OVERWRITE',
        'error'           : 'ERROR',
        'warn'            : 'WARN',
        'protect'         : 'PROTECT',
        'default'         : 'DEFAULT',
    }

    states = (
        ('unparsed', 'exclusive'),
    )

    def t_unparsed_left(self,t):
        r'\('
        t.lexer.level+=1
        # print "left",t.lexer.level,

    def t_unparsed_right(self,t):
        r'\)'
        t.lexer.level-=1
        # print "right",t.lexer.level,
        if t.lexer.level < 0 :
            t.value = t.lexer.lexdata[t.lexer.code_start:t.lexer.lexpos-1]
            # print t.value
            t.lexer.lexpos-=1
            t.type = "UNPARSEDCHUNK"
            t.lexer.lineno += t.value.count('\n')
            t.lexer.begin('INITIAL')
            return t

    t_unparsed_ignore = ' \t\n0123456789.-+e'

    def t_unparsed_error(self,t):
        print "Error",t.lexer.lexdata[t.lexer.lexpos]
        t.lexer.skip(1)

    def t_NAME(self,t):
        r'[a-zA-Z_][+\-<>(),.\*|a-zA-Z_0-9&%:]*'
        t.type=self.reserved.get(t.value,'NAME')
        if t.value[-1]==")":
            if t.value.count(")")>t.value.count("("):
                # Give back the last ) because it probably belongs to a list
                t.value=t.value[:-1]
                t.lexer.lexpos-=1

        return t

    def t_SUBSTITUTION(self,t):
        r'\$[a-zA-Z_][+\-<>(),.\*|a-zA-Z_0-9&%:]*'
        t.type=self.reserved.get(t.value,'SUBSTITUTION')
        if t.value[-1]==")":
            if t.value.count(")")>t.value.count("("):
                # Give back the last ) because it probably belongs to a list
                t.value=t.value[:-1]
                t.lexer.lexpos-=1

        return t

    t_KANALGITTER = r'\#'

    t_ICONST = r'(-|)\d+([uU]|[lL]|[uU][lL]|[lL][uU])?'

    t_FCONST = r'(-|)((\d+)(\.\d*)(e(\+|-)?(\d+))? | (\d+)e(\+|-)?(\d+))([lL]|[fF])?'

    t_SCONST = r'\"([^\\\n]|(\\.))*?\"'

    literals = "(){};[]"

    t_ignore=" \t\r"

    # Define a rule so we can track line numbers
    def t_newline(self,t):
        r'\n+'
        t.lexer.lineno += len(t.value)
        now=t.lexer.lexpos
        next=t.lexer.lexdata.find('\n',now)
        if next>=0:
            line=t.lexer.lexdata[now:next]
            pos=line.find("=")
            if pos>=0:
                if ((line.find("//")>=0 and line.find("//")<pos)) or (line.find("/*")>=0 and line.find("/*")<pos) or (line.find('"')>=0 and line.find('"')<pos):
                    return
                t.value = line
                t.type = "REACTION"
                t.lexer.lineno += 1
                t.lexer.lexpos = next
                return t
        # self.addNewlinesToDecorations(t.value)

    # C or C++ comment (ignore)
    def t_ccode_comment(self,t):
        r'(/\*(.|\n)*?\*/)|(//.*)'
        t.lexer.lineno += t.value.count('\n')
        self.addCommentToDecorations(t.value)
        pass

    # Error handling rule
    def t_error(self,t):
        raise PyFoamParserError("Illegal character '%s'" % t.value[0])
        # t.lexer.skip(1)  # the old days when illegal characters were accepted

    def p_global(self,p):
        'global : header dictbody'
        p[0] = ( p[1] , p[2] )

    def p_gotHeader(self,p):
        'gotHeader :'
        p.lexer.lexpos=len(p.lexer.lexdata)

    def p_noBody(self,p):
        ''' noBody : FOAMFILE '{' dictbody gotHeader '}' '''
        p[0] = ( p[3] , {} )

    def p_noHeader(self,p):
        'noHeader : dictbody'
        p[0] = ( None , p[1] )

    def p_pureList(self,p):
        'pureList : list'
        p[0] = ( None , p[1] )

    def p_pureListWithHeader(self,p):
        '''pureListWithHeader : header list
                              | header prelist '''
        p[0] = ( p[1] , p[2] )

    def p_boundaryDict(self,p):
        '''boundaryDict : header list
                        | header prelist '''
        # p[0] = ( p[1] , dict(zip(p[2][::2],p[2][1::2])) )
        p[0] = ( p[1] , p[2] )

    def p_header(self,p):
        'header : FOAMFILE dictionary'
        p[0] = p[2]
        if p[0]["format"]=="binary":
            self.binaryMode=True
            raise FatalErrorPyFoamException("Can not parse binary files. It is not implemented")
        elif p[0]["format"]=="ascii":
            self.binaryMode=False
        else:
            raise FatalErrorPyFoamException("Don't know how to parse file format",p[0]["format"])

    def p_macro(self,p):
        '''macro : KANALGITTER include
                 | KANALGITTER inputMode
                 | KANALGITTER remove'''
        p[0] = p[1]+p[2]+"\n"
        if self.doMacros:
            p[0]="// "+p[0]

    def p_include(self,p):
        '''include : INCLUDE SCONST
                   | INCLUDEIFPRESENT SCONST'''
        if self.doMacros:
            fName=path.join(self.directory(),p[2][1:-1])
            read=True
            if p[1]=="includeIfPresent" and not path.exists(fName):
                read=False
            if read and not path.exists(fName):
                raise PyFoamParserError("The included file "+fName+" does not exist")
            if read:
                data=ParsedParameterFile(fName,noHeader=True)
                into=self.dictStack[-1]
                for k in data:
                    into[k]=data[k]

        p[0] = p[1] + " " + p[2]

    def p_inputMode(self,p):
        '''inputMode : INPUTMODE ERROR
                     | INPUTMODE WARN
                     | INPUTMODE PROTECT
                     | INPUTMODE DEFAULT
                     | INPUTMODE MERGE
                     | INPUTMODE OVERWRITE'''
        p[0] = p[1] + " " + p[2]
        self.inputMode=getattr(inputModes,p[2])

    def p_remove(self,p):
        '''remove : REMOVE word
                  | REMOVE wlist'''
        p[0] = p[1] + " "
        if type(p[2])==str:
            p[0]+=p[2]
        else:
            p[0]+="( "
            for w in p[2]:
                p[0]+=w+" "
            p[0]+=")"

    def p_integer(self,p):
        '''integer : ICONST'''
        p[0] = int(p[1])

    def p_float(self,p):
        '''integer : FCONST'''
        p[0] = float(p[1])

    def p_enter_dict(self,p):
        '''enter_dict :'''
        self.dictStack.append(DictProxy())

    def p_exit_dict(self,p):
        '''exit_dict :'''
        p[0]=self.dictStack.pop()

    def p_dictionary(self,p):
        '''dictionary : '{' enter_dict dictbody '}' exit_dict
                      | '{' '}' '''
        if len(p)==6:
            p[0] = p[5]
        else:
            p[0] = DictProxy()

    def p_dictbody(self,p):
        '''dictbody : dictbody dictline
                    | dictline
                    | empty'''

        if len(p)==3:
            p[0]=p[1]
            if self.duplicateCheck:
                if p[2][0] in p[0]:
                    if self.duplicateFail:
                        error("Key",p[2][0],"already defined")
                    else:
                        warning("Key",p[2][0],"already defined")
            p[0][p[2][0]]=p[2][1]
            p[0].addDecoration(p[2][0],self.getDecoration())
        else:
            p[0]=self.dictStack[-1]

            if p[1]:
                p[0][p[1][0]]=p[1][1]

    def p_list(self,p):
        '''list : '(' itemlist ')' '''
        p[0] = self.condenseAllPreFixLists(p[2])
        if len(p[2])==3 or len(p[2])==9 or len(p[2])==6:
            isVector=True
            for i in p[2]:
                try:
                    float(i)
                except:
                    isVector=False
            if isVector:
                if len(p[2])==3:
                    p[0]=apply(Vector,p[2])
                elif len(p[2])==9:
                    p[0]=apply(Tensor,p[2])
                else:
                    p[0]=apply(SymmTensor,p[2])

    def p_wlist(self,p):
        '''wlist : '(' wordlist ')' '''
        p[0] = p[2]

    def p_unparsed(self,p):
        '''unparsed : UNPARSEDCHUNK'''
        p[0] = Unparsed(p[1])

    def p_prelist_seen(self,p):
        '''prelist_seen : '''
        if self.listLengthUnparsed!=None:
            # print "Hepp"
            if int(p[-1])>=self.listLengthUnparsed:
                # print "Ho",p.lexer.lexpos,p.lexer.lexdata[p.lexer.lexpos-1:p.lexer.lexpos+2],p[1],len(p[1])
                p.lexer.begin('unparsed')
                p.lexer.level=0
                p.lexer.code_start = p.lexer.lexpos

        # t=p.lexer.token()
        ## print t.type
        ## return t
        # p[0] = None

    def p_prelist(self,p):
        '''prelist : integer prelist_seen '(' itemlist ')'
                   | integer prelist_seen '(' unparsed ')' '''
        if type(p[4])==Unparsed:
            p[0] = UnparsedList(int(p[1]),p[4].data)
        else:
            p[0] = self.condenseAllPreFixLists(p[4])

    def p_itemlist(self,p):
        '''itemlist : itemlist item
                    | item '''
        if len(p)==2:
            if p[1]==None:
                p[0]=[]
            else:
                p[0]=[ p[1] ]
        else:
            p[0]=p[1]
            p[0].append(p[2])

    def p_wordlist(self,p):
        '''wordlist : wordlist word
                    | word '''
        if len(p)==2:
            if p[1]==None:
                p[0]=[]
            else:
                p[0]=[ p[1] ]
        else:
            p[0]=p[1]
            p[0].append(p[2])

    def p_word(self,p):
        '''word : NAME
                | UNIFORM
                | NONUNIFORM
                | MERGE
                | OVERWRITE
                | DEFAULT
                | WARN
                | PROTECT
                | ERROR'''
        p[0]=p[1]

    def p_substitution(self,p):
        '''substitution : SUBSTITUTION'''
        if self.doMacros:
            nm=p[1][1:]
            p[0]="<Symbol '"+nm+"' not found>"
            if nm in self.dictStack[0]:
                p[0]=deepcopy(self.dictStack[0][nm])
        else:
            p[0]=p[1]

    def p_dictkey(self,p):
        '''dictkey : word
                   | SCONST'''
        p[0]=p[1]

    def p_dictline(self,p):
        '''dictline : dictkey dictitem ';'
                    | dictkey list ';'
                    | dictkey prelist ';'
                    | dictkey fieldvalue ';'
                    | macro
                    | substitution ';'
                    | dictkey dictionary'''
        if len(p)==4 and type(p[2])==list:
            # remove the prefix from long lists (if present)
            doAgain=True
            tmp=p[2]
            while doAgain:
                doAgain=False
                for i in range(len(tmp)-1):
                    if type(tmp[i])==int and type(tmp[i+1]) in [list]:
                        if tmp[i]==len(tmp[i+1]):
                            nix=tmp[:i]+tmp[i+1:]
                            for i in range(len(tmp)):
                                tmp.pop()
                            tmp.extend(nix)
                            doAgain=True
                            break
        if len(p)==4:
            p[0] = ( p[1] , p[2] )
        elif len(p)==3:
            if p[2]==';':
                p[0]= (p[1],'')
            else:
                p[0] = ( p[1] , p[2] )
        else:
            p[0] = ( self.emptyCnt , p[1] )
            self.emptyCnt+=1

    def p_number(self,p):
        '''number : integer
                  | FCONST'''
        p[0] = p[1]

    def p_dimension(self,p):
        '''dimension : '[' number number number number number number number ']'
                     | '[' number number number number number ']' '''
        result=p[2:-1]
        if len(result)==5:
            result+=[0,0]

        p[0]=apply(Dimension,result)

    def p_vector(self,p):
        '''vector : '(' number number number ')' '''
        p[0]=apply(Vector,p[2:5])

    def p_tensor(self,p):
        '''tensor : '(' number number number number number number number number number ')' '''
        p[0]=apply(Tensor,p[2:11])

    def p_symmtensor(self,p):
        '''symmtensor : '(' number number number number number number ')' '''
        p[0]=apply(SymmTensor,p[2:8])

    def p_fieldvalue_uniform(self,p):
        '''fieldvalue : UNIFORM number
                      | UNIFORM vector
                      | UNIFORM tensor
                      | UNIFORM symmtensor'''
        p[0] = Field(p[2])

    def p_fieldvalue_nonuniform(self,p):
        '''fieldvalue : NONUNIFORM NAME list
                      | NONUNIFORM NAME prelist'''
        p[0] = Field(p[3],name=p[2])

    def p_dictitem(self,p):
        '''dictitem : longitem
                    | pitem'''
        if type(p[1])==tuple:
            if len(p[1])==2 and p[1][0]=="uniform":
                p[0]=Field(p[1][1])
            elif len(p[1])==3 and p[1][0]=="nonuniform":
                p[0]=Field(p[1][2],name=p[1][1])
            else:
                p[0]=TupleProxy(p[1])
        else:
            p[0] = p[1]

    def p_longitem(self,p):
        '''longitem : pitemlist pitem'''
        p[0] = p[1]+(p[2],)

    def p_pitemlist(self,p):
        '''pitemlist : pitemlist pitem
                     | pitem '''
        if len(p)==2:
            p[0]=(p[1],)
        else:
            ## print type(p[1][-1])
            ## if type(p[1][-1])==int and type(p[2])==tuple:
            ##     print "Hepp",p[2]
            p[0]=p[1]+(p[2],)

    def p_pitem(self,p):
        '''pitem : word
                 | SCONST
                 | number
                 | dictionary
                 | list
                 | dimension
                 | substitution
                 | empty'''
        p[0] = p[1]

    def p_item(self,p):
        '''item : pitem
                | REACTION
                | list
                | dictionary'''
        p[0] = p[1]

    def p_empty(self,p):
        'empty :'
        pass

    def p_error(self,p):
        raise PyFoamParserError("Syntax error at token", p)  # .type, p.lineno
        # Just discard the token and tell the parser it's okay.
        # self.yacc.errok()

class PyFoamParserError(FatalErrorPyFoamException):
    def __init__(self,descr,data=None):
        FatalErrorPyFoamException.__init__(self,"Parser Error:",descr)
        self.descr=descr
        self.data=data

    def __str__(self):
        result="Error in PyFoamParser: '"+self.descr+"'"
        if self.data!=None:
            val=self.data.value
            if len(val)>100:
                val=val[:40]+" .... "+val[-40:]

            result+=" @ %r (Type: %s ) in line %d at position %d" % (val,
                                                                     self.data.type,
                                                                     self.data.lineno,
                                                                     self.data.lexpos)

        return result

    def __repr__(self):
        return str(self)

class FoamStringParser(FoamFileParser):
    """Convenience class that parses only a headerless OpenFOAM dictionary"""

    def __init__(self,
                 content,
                 debug=False,
                 duplicateCheck=False,
                 duplicateFail=False):
        """@param content: the string to be parsed
        @param debug: output debug information during parsing"""

        FoamFileParser.__init__(self,
                                content,
                                debug=debug,
                                noHeader=True,
                                boundaryDict=False,
                                duplicateCheck=duplicateCheck,
                                duplicateFail=duplicateFail)

    def __str__(self):
        return str(FoamFileGenerator(self.data))

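# Usage sketch (hypothetical, not part of the original source): parse a small
# headerless dictionary given as a plain string.
#
#   p=FoamStringParser("writer vtk; fields (U p);")
#   print p["writer"]                  # -> vtk
#   print p["fields"]                  # -> ['U', 'p']
#   print str(p)                       # regenerates an OpenFOAM-formatted string
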
class ParsedBoundaryDict(ParsedParameterFile):
    """Convenience class that parses only an OpenFOAM polyMesh-boundaries file"""

    def __init__(self,name,backup=False,debug=False):
        """@param name: The name of the parameter file
        @param backup: create a backup-copy of the file"""

        ParsedParameterFile.__init__(self,name,backup=backup,debug=debug,boundaryDict=True)

    def parse(self,content):
        """Constructs a representation of the file"""
        temp=ParsedParameterFile.parse(self,content)
        self.content={}
        for i in range(0,len(temp),2):
            self.content[temp[i]]=temp[i+1]
        return self.content

    def __str__(self):
        string="// File generated by PyFoam - sorry for the ugliness\n\n"
        temp=[]
        for k,v in self.content.iteritems():
            temp.append((k,v))

        temp.sort(lambda x,y:cmp(int(x[1]["startFace"]),int(y[1]["startFace"])))

        temp2=[]

        for b in temp:
            temp2.append(b[0])
            temp2.append(b[1])

        generator=FoamFileGenerator(temp2,header=self.header)
        string+=str(generator)

        return string

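# Usage sketch (hypothetical, not part of the original source): look up one patch
# of a case's constant/polyMesh/boundary file (the patch name "inlet" is assumed).
#
#   bnd=ParsedBoundaryDict(path.join("caseDir","constant","polyMesh","boundary"))
#   print bnd["inlet"]["type"]         # e.g. patch
#   print bnd["inlet"]["nFaces"]       # number of faces on that patch
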
class ParsedFileHeader(ParsedParameterFile):
    """Only parse the header of a file"""

    def __init__(self,name):
        ParsedParameterFile.__init__(self,name,backup=False,noBody=True)

    def __getitem__(self,name):
        return self.header[name]

    def __contains__(self,name):
        return name in self.header

    def __len__(self):
        return len(self.header)
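
# Usage sketch (hypothetical, not part of the original source): inspect the header
# of a (possibly large) field file without parsing its body.
#
#   hdr=ParsedFileHeader(path.join("caseDir","0","U"))
#   print hdr["class"]                 # -> volVectorField
#   print hdr["object"]                # -> U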