"""Parameter file is read into memory and modified there"""

from FileBasis import FileBasisBackup
from PyFoam.Basics.PlyParser import PlyParser
from PyFoam.Basics.FoamFileGenerator import FoamFileGenerator

from PyFoam.Basics.DataStructures import Vector,Field,Dimension,DictProxy,TupleProxy,Tensor,SymmTensor,Unparsed,UnparsedList

from PyFoam.Error import error

from os import path
from copy import deepcopy

class ParsedParameterFile(FileBasisBackup):
    """Parameter file whose complete representation is read into
    memory, can be manipulated and afterwards written to disk"""

    def __init__(self,
                 name,
                 backup=False,
                 debug=False,
                 boundaryDict=False,
                 listDict=False,
                 listDictWithHeader=False,
                 listLengthUnparsed=None,
                 noHeader=False,
                 noBody=False,
                 doMacroExpansion=False,
                 dontRead=False):
        """@param name: The name of the parameter file
        @param backup: create a backup-copy of the file
        @param debug: output debug information during parsing
        @param boundaryDict: the file to parse is a boundary file
        @param listDict: the file only contains a list
        @param listDictWithHeader: the file only contains a list and a header
        @param listLengthUnparsed: lists longer than this length are not parsed
        @param noHeader: don't expect a header
        @param noBody: don't read the body of the file (only the header)
        @param doMacroExpansion: expand #include and $var
        @param dontRead: do not read the file during construction
        """

        self.noHeader=noHeader
        self.noBody=noBody
        FileBasisBackup.__init__(self,name,backup=backup)
        self.debug=debug
        self.boundaryDict=boundaryDict
        self.listDict=listDict
        self.listDictWithHeader=listDictWithHeader
        self.listLengthUnparsed=listLengthUnparsed
        self.doMacros=doMacroExpansion

        self.header=None
        self.content=None

        if not dontRead:
            self.readFile()

    def parse(self,content):
        """Constructs a representation of the file"""
        parser=FoamFileParser(content,
                              debug=self.debug,
                              fName=self.name,
                              boundaryDict=self.boundaryDict,
                              listDict=self.listDict,
                              listDictWithHeader=self.listDictWithHeader,
                              listLengthUnparsed=self.listLengthUnparsed,
                              noHeader=self.noHeader,
                              noBody=self.noBody,
                              doMacroExpansion=self.doMacros)

        self.content=parser.getData()
        self.header=parser.getHeader()
        return self.content

    def __contains__(self,key):
        return key in self.content

    def __getitem__(self,key):
        return self.content[key]

    def __setitem__(self,key,value):
        self.content[key]=value

    def __delitem__(self,key):
        del self.content[key]

    def __len__(self):
        return len(self.content)

    def __iter__(self):
        for key in self.content:
            yield key

    def __str__(self):
        """Generates a string from the contents in memory.
        Used to be called makeString"""

        string="// -*- C++ -*-\n// File generated by PyFoam - sorry for the ugliness\n\n"

        generator=FoamFileGenerator(self.content,header=self.header)
        string+=str(generator)

        return string
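
    # Typical use (a sketch: writeFile() is assumed to be inherited from
    # FileBasisBackup, and "system/controlDict" is just an example path):
    #
    #   ctrl=ParsedParameterFile("system/controlDict")
    #   print ctrl["deltaT"]
    #   ctrl["endTime"]=1000
    #   ctrl.writeFile()
    #
    # The dictionary protocol above (__getitem__, __setitem__, __iter__, ...)
    # operates on self.content, the nested structure returned by parse().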

class WriteParameterFile(ParsedParameterFile):
    """A specialization that is used to only write to the file"""

    def __init__(self,
                 name,
                 backup=False,
                 className="dictionary",
                 objectName=None):
        ParsedParameterFile.__init__(self,
                                     name,
                                     backup=backup,
                                     dontRead=True)

        if objectName==None:
            objectName=path.basename(name)

        self.content={}
        self.header={"version":"2.0",
                     "format":"ascii",
                     "class":className,
                     "object":objectName}
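
    # Usage sketch (assumes writeFile() from FileBasisBackup; the file name is
    # just an example): build a dictionary from scratch and write it out with
    # a proper FoamFile header.
    #
    #   out=WriteParameterFile("system/generatedDict",className="dictionary")
    #   out["solvers"]={"p":{"solver":"PCG","tolerance":1e-7}}
    #   out.writeFile()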

class FoamFileParser(PlyParser):
    """Class that parses a string that contains the contents of an
    OpenFOAM-file and builds a nested structure of dictionaries and
    lists from it"""

    def __init__(self,
                 content,
                 fName=None,
                 debug=False,
                 noHeader=False,
                 noBody=False,
                 doMacroExpansion=False,
                 boundaryDict=False,
                 listDict=False,
                 listDictWithHeader=False,
                 listLengthUnparsed=None):
        """@param content: the string to be parsed
        @param fName: Name of the actual file (if any)
        @param debug: output debug information during parsing
        @param noHeader: switch that turns off the parsing of the header
        @param noBody: don't parse the body (only the header)
        @param doMacroExpansion: expand #include and $var
        @param boundaryDict: the content is a boundary file
        @param listDict: the content only contains a list
        @param listDictWithHeader: the content only contains a list and a header
        @param listLengthUnparsed: lists longer than this length are not parsed"""

        self.fName=fName
        self.data=None
        self.header=None
        self.debug=debug
        self.listLengthUnparsed=listLengthUnparsed
        self.doMacros=doMacroExpansion

        # select the start symbol of the grammar; only one of the special
        # modes may be active at a time
        startCnt=0

        if noBody:
            self.start='noBody'
            startCnt+=1

        if noHeader:
            self.start='noHeader'
            startCnt+=1

        if listDict:
            self.start='pureList'
            startCnt+=1

        if listDictWithHeader:
            self.start='pureListWithHeader'
            startCnt+=1

        if boundaryDict:
            self.start='boundaryDict'
            startCnt+=1

        if startCnt>1:
            error("Only one start symbol can be specified.",startCnt,"are specified")

        PlyParser.__init__(self,debug=debug)

        self.emptyCnt=0

        self.temp=None
        self.rootDict=True

        self.header,self.data=self.parse(content)

    def __contains__(self,key):
        return key in self.data

    def __getitem__(self,key):
        return self.data[key]

    def directory(self):
        """Return the directory the parsed file resides in (used for
        resolving #include macros)"""
        if self.fName==None:
            return path.curdir
        else:
            return path.dirname(self.fName)

    def getData(self):
        """ Get the data structure"""
        return self.data

    def getHeader(self):
        """ Get the OpenFOAM-header"""
        return self.header
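
    # Example (a sketch): parse a snippet without a FoamFile header and pull
    # out the resulting data structure via getData().
    #
    #   p=FoamFileParser("nu nu [0 2 -1 0 0 0 0] 1e-05;\n",noHeader=True)
    #   values=p.getData()    # DictProxy behaving like a dict
    #   nu=values["nu"]       # TupleProxy: (word, Dimension, number)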

    def printContext(self,c,ind):
        """Prints the context of the current index"""
        print "------"
        print c[max(0,ind-100):max(0,ind-1)]
        print "------"
        print ">",c[ind-1],"<"
        print "------"
        print c[min(len(c),ind):min(len(c),ind+100)]
        print "------"

    def parserError(self,text,c="",ind=0):
        """Prints the error message of the parser and exits"""
        print "PARSER ERROR:",text
        print "On index",ind
        self.printContext(c,ind)
        raise PyFoamParserError("Unspecified")

    tokens = (
        'NAME',
        'ICONST',
        'FCONST',
        'SCONST',
        'FOAMFILE',
        'UNIFORM',
        'NONUNIFORM',
        'UNPARSEDCHUNK',
        'REACTION',
        'SUBSTITUTION',
        'MERGE',
        'OVERWRITE',
        'ERROR',
        'DEFAULT',
        'INCLUDE',
        'REMOVE',
        'INPUTMODE',
        'KANALGITTER',
    )

    reserved = {
        'FoamFile' : 'FOAMFILE',
        'uniform' : 'UNIFORM',
        'nonuniform' : 'NONUNIFORM',
        'include' : 'INCLUDE',
        'remove' : 'REMOVE',
        'inputMode' : 'INPUTMODE',
        'merge' : 'MERGE',
        'overwrite' : 'OVERWRITE',
        'error' : 'ERROR',
        'default' : 'DEFAULT',
    }

    states = (
        ('unparsed', 'exclusive'),
    )

    # lexer rules for the 'unparsed' exclusive state: long lists are skipped
    # by counting parentheses until the closing one of the enclosing list
    def t_unparsed_open(self,t):
        r'\('
        t.lexer.level+=1

    def t_unparsed_close(self,t):
        r'\)'
        t.lexer.level-=1

        if t.lexer.level < 0 :
            t.value = t.lexer.lexdata[t.lexer.code_start:t.lexer.lexpos-1]
            # give the closing parenthesis back to the regular lexer
            t.lexer.lexpos-=1
            t.type = "UNPARSEDCHUNK"
            t.lexer.lineno += t.value.count('\n')
            t.lexer.begin('INITIAL')
            return t

    t_unparsed_ignore = ' \t\n0123456789.-+e'

    def t_unparsed_error(self,t):
        print "Error",t.lexer.lexdata[t.lexer.lexpos]
        t.lexer.skip(1)

    def t_SUBSTITUTION(self,t):
        r'\$[a-zA-Z_][+\-<>(),.\*|a-zA-Z_0-9&%:]*'
        t.type=self.reserved.get(t.value,'SUBSTITUTION')
        if t.value[-1]==")":
            if t.value.count(")")>t.value.count("("):
                # the trailing parenthesis closes an enclosing list - give it back
                t.value=t.value[:-1]
                t.lexer.lexpos-=1

        return t

    t_KANALGITTER = r'\#'

    t_ICONST = r'(-|)\d+([uU]|[lL]|[uU][lL]|[lL][uU])?'

    t_FCONST = r'(-|)((\d+)(\.\d*)(e(\+|-)?(\d+))? | (\d+)e(\+|-)?(\d+))([lL]|[fF])?'

    t_SCONST = r'\"([^\\\n]|(\\.))*?\"'

    literals = "(){};[]"

    t_ignore=" \t\r"
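
    # Illustration of how a typical dictionary line is tokenized by the rules
    # above (a rough sketch):
    #
    #   nu              nu [0 2 -1 0 0 0 0] 1e-05;
    #
    # becomes roughly NAME NAME '[' ICONST ICONST ICONST ICONST ICONST ICONST
    # ICONST ']' FCONST ';' - braces, brackets and the semicolon come from the
    # 'literals' string, words become NAME tokens (or a reserved keyword) and
    # numbers become ICONST/FCONST.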

    def t_newline(self,t):
        r'\n+'
        t.lexer.lineno += len(t.value)
        # peek at the next line: if it contains a '=' that is not inside a
        # comment it is treated as a chemical reaction and handed over as a
        # single REACTION token
        now=t.lexer.lexpos
        next=t.lexer.lexdata.find('\n',now)
        if next>=0:
            line=t.lexer.lexdata[now:next]
            pos=line.find("=")
            if pos>=0:
                if (line.find("//")>=0 and line.find("//")<pos) or (line.find("/*")>=0 and line.find("/*")<pos):
                    return
                t.value = line
                t.type = "REACTION"
                t.lexer.lineno += 1
                t.lexer.lexpos = next
                return t

    def t_error(self,t):
        print "Illegal character '%s'" % t.value[0]
        t.lexer.skip(1)

    def p_global(self,p):
        'global : header clearTemp dictbody'
        p[0] = ( p[1] , p[3] )

    def p_clearTemp(self,p):
        'clearTemp :'
        self.rootDict=True
        self.temp=None

    def p_gotHeader(self,p):
        'gotHeader :'
        # the header is complete - skip the rest of the input
        p.lexer.lexpos=len(p.lexer.lexdata)

    def p_noBody(self,p):
        ''' noBody : FOAMFILE '{' dictbody gotHeader '}' '''
        p[0] = ( p[3] , {} )

    def p_noHeader(self,p):
        'noHeader : dictbody'
        p[0] = ( None , p[1] )

    def p_pureList(self,p):
        'pureList : list'
        p[0] = ( None , p[1] )

    def p_pureListWithHeader(self,p):
        '''pureListWithHeader : header list
                              | header prelist '''
        p[0] = ( p[1] , p[2] )

    def p_boundaryDict(self,p):
        '''boundaryDict : header list
                        | header prelist '''
        p[0] = ( p[1] , p[2] )

    def p_header(self,p):
        'header : FOAMFILE dictionary'
        p[0] = p[2]

    def p_macro(self,p):
        '''macro : KANALGITTER include
                 | KANALGITTER inputMode
                 | KANALGITTER remove'''
        p[0] = p[1]+p[2]+"\n"
        if self.doMacros:
            # the macro has already been expanded - keep it only as a comment
            p[0]="// "+p[0]

    def p_include(self,p):
        '''include : INCLUDE SCONST'''
        if self.doMacros:
            # read the included file and remember its entries so that later
            # $var substitutions can be resolved
            fName=path.join(self.directory(),p[2][1:-1])
            data=ParsedParameterFile(fName,noHeader=True)
            if self.temp==None:
                self.temp=DictProxy()
            for k in data:
                self.temp[k]=data[k]

        p[0] = p[1] + " " + p[2]

    def p_remove(self,p):
        '''remove : REMOVE word
                  | REMOVE wlist'''
        p[0] = p[1] + " "
        if type(p[2])==str:
            p[0]+=p[2]
        else:
            p[0]+="( "
            for w in p[2]:
                p[0]+=w+" "
            p[0]+=")"

    def p_integer(self,p):
        '''integer : ICONST'''
        p[0] = int(p[1])

    def p_float(self,p):
        '''integer : FCONST'''
        p[0] = float(p[1])

    def p_dictionary(self,p):
        '''dictionary : '{' dictbody '}'
                      | '{' '}' '''
        if len(p)==4:
            p[0] = p[2]
        else:
            p[0] = DictProxy()

    def p_dictbody(self,p):
        '''dictbody : dictbody dictline
                    | dictline
                    | empty'''

        if len(p)==3:
            p[0]=p[1]
            p[0][p[2][0]]=p[2][1]
        else:
            p[0]=DictProxy()

            if self.temp==None:
                self.temp=p[0]
            elif self.rootDict:
                # carry the entries collected from #include files over into
                # the root dictionary
                for k,v in self.temp.iteritems():
                    if type(k)!=int:
                        p[0][k]=v
                    else:
                        p[0][self.emptyCnt]=v
                        self.emptyCnt+=1

                self.temp=p[0]

            self.rootDict=False

            if p[1]:
                p[0][p[1][0]]=p[1][1]

    def p_list(self,p):
        '''list : '(' itemlist ')' '''
        p[0] = p[2]
        # lists of 3, 6 or 9 numbers are interpreted as Vector, SymmTensor
        # and Tensor respectively
        if len(p[2])==3 or len(p[2])==9 or len(p[2])==6:
            isVector=True
            for i in p[2]:
                try:
                    float(i)
                except:
                    isVector=False
            if isVector:
                if len(p[2])==3:
                    p[0]=apply(Vector,p[2])
                elif len(p[2])==9:
                    p[0]=apply(Tensor,p[2])
                else:
                    p[0]=apply(SymmTensor,p[2])

    def p_wlist(self,p):
        '''wlist : '(' wordlist ')' '''
        p[0] = p[2]

    def p_unparsed(self,p):
        '''unparsed : UNPARSEDCHUNK'''
        p[0] = Unparsed(p[1])

    def p_prelist_seen(self,p):
        '''prelist_seen : '''
        # embedded action: if the announced list length reaches
        # listLengthUnparsed, switch the lexer to the 'unparsed' state so that
        # the list body is swallowed as a single UNPARSEDCHUNK token
        if self.listLengthUnparsed!=None:
            if int(p[-1])>=self.listLengthUnparsed:
                p.lexer.begin('unparsed')
                p.lexer.level=0
                p.lexer.code_start = p.lexer.lexpos

    def p_prelist(self,p):
        '''prelist : integer prelist_seen '(' itemlist ')'
                   | integer prelist_seen '(' unparsed ')' '''
        if type(p[4])==Unparsed:
            p[0] = UnparsedList(int(p[1]),p[4].data)
        else:
            p[0] = p[4]
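
    # Notes on the list rules above (a sketch of the resulting objects):
    #
    #   (0 0 1)               -> Vector(0,0,1)      (p_list, 3 numbers)
    #   (1 0 0 0 1 0 0 0 1)   -> Tensor(...)        (p_list, 9 numbers)
    #   4(0.1 0.2 0.3 0.4)    -> plain Python list  (p_prelist)
    #
    # If the file was parsed with listLengthUnparsed=N and the announced
    # length is >= N, the body is not parsed at all and an UnparsedList
    # carrying the raw text is returned instead.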

    def p_itemlist(self,p):
        '''itemlist : itemlist item
                    | item '''
        if len(p)==2:
            if p[1]==None:
                p[0]=[]
            else:
                p[0]=[ p[1] ]
        else:
            p[0]=p[1]
            p[0].append(p[2])

    def p_wordlist(self,p):
        '''wordlist : wordlist word
                    | word '''
        if len(p)==2:
            if p[1]==None:
                p[0]=[]
            else:
                p[0]=[ p[1] ]
        else:
            p[0]=p[1]
            p[0].append(p[2])

    def p_word(self,p):
        '''word : NAME
                | UNIFORM
                | NONUNIFORM
                | MERGE
                | OVERWRITE
                | DEFAULT
                | ERROR'''
        p[0]=p[1]

    def p_substitution(self,p):
        '''substitution : SUBSTITUTION'''
        if self.doMacros:
            nm=p[1][1:]
            p[0]="<Symbol '"+nm+"' not found>"
            if self.temp==None:
                return
            if nm in self.temp:
                p[0]=deepcopy(self.temp[nm])
        else:
            p[0]=p[1]
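
    # Example of what macro expansion does (a sketch): with
    # doMacroExpansion=True an input like
    #
    #   #include "initialConditions"
    #   pRef            $pressure;
    #
    # pulls the entries of the included file into self.temp (p_include) and
    # replaces $pressure with a deep copy of the stored value
    # (p_substitution). Without macro expansion the literal '#include ...'
    # and '$pressure' strings are kept in the parsed data.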

    def p_dictline(self,p):
        '''dictline : word dictitem ';'
                    | word list ';'
                    | word prelist ';'
                    | word fieldvalue ';'
                    | macro
                    | word dictionary'''
        if len(p)==4 and type(p[2])==list:
            # flatten entries of the form "<length> ( ... )" that were parsed
            # as a number followed by a list of exactly that length
            doAgain=True
            tmp=p[2]
            while doAgain:
                doAgain=False
                for i in range(len(tmp)-1):
                    if type(tmp[i])==int and type(tmp[i+1]) in [list]:
                        if tmp[i]==len(tmp[i+1]):
                            nix=tmp[:i]+tmp[i+1:]
                            for i in range(len(tmp)):
                                tmp.pop()
                            tmp.extend(nix)
                            doAgain=True
                            break
        if len(p)>=3:
            p[0] = ( p[1] , p[2] )
        else:
            p[0] = ( self.emptyCnt , p[1] )
            self.emptyCnt+=1

    def p_number(self,p):
        '''number : integer
                  | FCONST'''
        p[0] = p[1]

    def p_dimension(self,p):
        '''dimension : '[' number number number number number number number ']'
                     | '[' number number number number number ']' '''
        result=p[2:-1]
        if len(result)==5:
            # pad the short five-entry form with zeros
            result+=[0,0]

        p[0]=apply(Dimension,result)

    def p_vector(self,p):
        '''vector : '(' number number number ')' '''
        p[0]=apply(Vector,p[2:5])

    def p_tensor(self,p):
        '''tensor : '(' number number number number number number number number number ')' '''
        p[0]=apply(Tensor,p[2:11])

    def p_symmtensor(self,p):
        '''symmtensor : '(' number number number number number number ')' '''
        p[0]=apply(SymmTensor,p[2:8])
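
    # Example (a sketch): the dimension rule turns the usual seven-entry
    # OpenFOAM dimension set into a Dimension object, e.g. in
    #
    #   nu              nu [0 2 -1 0 0 0 0] 1e-05;
    #
    # the bracketed part becomes Dimension(0,2,-1,0,0,0,0); the short
    # five-entry form is padded with two trailing zeros.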

    def p_dictitem(self,p):
        '''dictitem : longitem
                    | pitem'''
        if type(p[1])==tuple:
            p[0]=TupleProxy(p[1])
        else:
            p[0] = p[1]

    def p_longitem(self,p):
        '''longitem : pitemlist pitem'''
        p[0] = p[1]+(p[2],)

    def p_pitemlist(self,p):
        '''pitemlist : pitemlist pitem
                     | pitem '''
        if len(p)==2:
            p[0]=(p[1],)
        else:
            p[0]=p[1]+(p[2],)

    def p_pitem(self,p):
        '''pitem : word
                 | SCONST
                 | number
                 | dictionary
                 | list
                 | dimension
                 | substitution
                 | empty'''
        p[0] = p[1]

    def p_item(self,p):
        '''item : pitem
                | REACTION
                | list
                | dictionary'''
        p[0] = p[1]


class PyFoamParserError(Exception):
    """Signals an error during parsing"""

    def __init__(self,descr,data=None):
        self.descr=descr
        self.data=data

    def __str__(self):
        result="Error in PyFoamParser: '"+self.descr+"'"
        if self.data!=None:
            val=self.data.value
            if len(val)>100:
                val=val[:40]+" .... "+val[-40:]

            result+=" @ %r (Type: %s ) in line %d at position %d" % (val,
                                                                     self.data.type,
                                                                     self.data.lineno,
                                                                     self.data.lexpos)

        return result


class FoamStringParser(FoamFileParser):
    """Convenience class that parses only a headerless OpenFOAM dictionary"""

    def __init__(self,content,debug=False):
        """@param content: the string to be parsed"""
        FoamFileParser.__init__(self,
                                content,
                                debug=debug,
                                noHeader=True)
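
# Usage sketch for the convenience class above (a sketch, assuming the
# constructor parses the given string with noHeader=True):
#
#   d=FoamStringParser("a 1;\nb (1 2 3);\n").getData()
#   print d["a"],d["b"]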

class ParsedBoundaryDict(ParsedParameterFile):
    """Convenience class that parses only an OpenFOAM polyMesh-boundaries file"""

    def __init__(self,name,backup=False,debug=False):
        ParsedParameterFile.__init__(self,
                                     name,
                                     backup=backup,
                                     debug=debug,
                                     boundaryDict=True)

    def parse(self,content):
        """Constructs a representation of the file"""
        temp=ParsedParameterFile.parse(self,content)
        # the boundary file is a list of alternating patch names and patch
        # dictionaries - turn it into a dictionary keyed by the patch name
        self.content={}
        for i in range(0,len(temp),2):
            self.content[temp[i]]=temp[i+1]
        return self.content

    def __str__(self):
        string="// File generated by PyFoam - sorry for the ugliness\n\n"
        temp=[]
        for k,v in self.content.iteritems():
            temp.append((k,v))

        # write the patches in the order of their start faces
        temp.sort(lambda x,y:cmp(int(x[1]["startFace"]),int(y[1]["startFace"])))

        temp2=[]

        for b in temp:
            temp2.append(b[0])
            temp2.append(b[1])

        generator=FoamFileGenerator(temp2,header=self.header)
        string+=str(generator)

        return string
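
# Usage sketch (the path and the patch name are just examples): access a
# patch entry by name.
#
#   bnd=ParsedBoundaryDict("constant/polyMesh/boundary")
#   print bnd["inlet"]["type"],bnd["inlet"]["nFaces"]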


class ParsedFileHeader(ParsedParameterFile):
    """Only parse the header of a file"""

    def __init__(self,name):
        # only the header is read (noBody=True)
        ParsedParameterFile.__init__(self,name,backup=False,noBody=True)

    def __getitem__(self,name):
        return self.header[name]

    def __contains__(self,name):
        return name in self.header

    def __len__(self):
        return len(self.header)
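
# Usage sketch: check the "class" entry of a field file's FoamFile header
# without reading the (possibly large) body. "0/U" is just an example path.
#
#   hdr=ParsedFileHeader("0/U")
#   if hdr["class"]=="volVectorField":
#       print "U is a vector field"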