1
2 """Parameter file is read into memory and modified there"""
3
4 from FileBasis import FileBasisBackup
5 from PyFoam.Basics.PlyParser import PlyParser
6 from PyFoam.Basics.FoamFileGenerator import FoamFileGenerator
7
8 from PyFoam.Basics.DataStructures import Vector,Field,Dimension,DictProxy,TupleProxy,Tensor,SymmTensor,Unparsed,UnparsedList,Codestream,DictRedirection
9
10 from PyFoam.Error import error,warning,FatalErrorPyFoamException
11
12 from os import path
13 from copy import deepcopy
14
16 """ Parameterfile whose complete representation is read into
17 memory, can be manipulated and afterwards written to disk"""
18
def __init__(self,
             name,
             backup=False,
             debug=False,
             boundaryDict=False,
             listDict=False,
             listDictWithHeader=False,
             listLengthUnparsed=None,
             preserveComments=True,
             noHeader=False,
             binaryMode=False,
             noBody=False,
             doMacroExpansion=False,
             dontRead=False,
             noVectorOrTensor=False,
             createZipped=True,
             longListOutputThreshold=20):
    """Reads (unless dontRead is set) and parses the file so that it
    can be manipulated in memory

    @param name: The name of the parameter file
    @param backup: create a backup-copy of the file
    @param debug: output debug information during parsing
    @param boundaryDict: the file to parse is a boundary file
    @param listDict: the file only contains a list
    @param listDictWithHeader: the file only contains a list and a header
    @param listLengthUnparsed: Lists longer than that length are not parsed
    @param preserveComments: forwarded to the parser (comments are kept as
    'decorations')
    @param binaryMode: Parse long lists in binary mode (to be overridden by
    the settings in the header)
    @param noHeader: don't expect a header
    @param noBody: don't read the body of the file (only the header)
    @param doMacroExpansion: expand #include and $var
    @param dontRead: Do not read the file during construction
    @param noVectorOrTensor: short lists of length 3, 6 and 9 are NOT
    interpreted as vectors or tensors
    @param createZipped: forwarded to FileBasisBackup (presumably whether a
    missing file is created gzipped - confirm there)
    @param longListOutputThreshold: Lists that are longer than this are
    prefixed with a length
    """

    self.noHeader=noHeader
    self.noBody=noBody
    # the base class does the actual file handling (and the backup if requested)
    FileBasisBackup.__init__(self,
                             name,
                             backup=backup,
                             createZipped=createZipped)
    self.debug=debug
    self.boundaryDict=boundaryDict
    self.listDict=listDict
    self.listDictWithHeader=listDictWithHeader
    self.listLengthUnparsed=listLengthUnparsed
    self.doMacros=doMacroExpansion
    self.preserveComments=preserveComments
    self.noVectorOrTensor=noVectorOrTensor
    # filled in by parse(): the FoamFile-header and the parsed body
    self.header=None
    self.content=None
    self.longListOutputThreshold=longListOutputThreshold
    self.binaryMode=binaryMode

    if not dontRead:
        self.readFile()
75
77 """Constructs a representation of the file"""
78 parser=FoamFileParser(content,
79 debug=self.debug,
80 fName=self.name,
81 boundaryDict=self.boundaryDict,
82 listDict=self.listDict,
83 listDictWithHeader=self.listDictWithHeader,
84 listLengthUnparsed=self.listLengthUnparsed,
85 noHeader=self.noHeader,
86 noBody=self.noBody,
87 preserveComments=self.preserveComments,
88 binaryMode=self.binaryMode,
89 noVectorOrTensor=self.noVectorOrTensor,
90 doMacroExpansion=self.doMacros)
91
92 self.content=parser.getData()
93 self.header=parser.getHeader()
94 return self.content
95
97 return key in self.content
98
100 return self.content[key]
101
103 self.content[key]=value
104
106 del self.content[key]
107
109 return len(self.content)
110
112 for key in self.content:
113 yield key
114
116 """Generates a string from the contents in memory
117 Used to be called makeString"""
118
119 string="// -*- C++ -*-\n// File generated by PyFoam - sorry for the ugliness\n\n"
120
121 generator=FoamFileGenerator(self.content,
122 header=self.header,
123 longListThreshold=self.longListOutputThreshold)
124 string+=generator.makeString(firstLevel=True)
125
126 return string
127
129 """A specialization that is used to only write to the file"""
def __init__(self,
             name,
             backup=False,
             className="dictionary",
             objectName=None,
             createZipped=False):
    """@param name: The name of the parameter file
    @param backup: create a backup-copy of the file
    @param className: the 'class' entry written to the FoamFile-header
    @param objectName: the 'object' entry of the header; defaults to the
    basename of the file
    @param createZipped: forwarded to ParsedParameterFile"""
    # dontRead=True: this file is only ever written, never parsed
    ParsedParameterFile.__init__(self,
                                 name,
                                 backup=backup,
                                 dontRead=True,
                                 createZipped=createZipped)

    # fixed: identity comparison with None (was 'objectName==None')
    if objectName is None:
        objectName=path.basename(name)

    # start out with an empty body and a minimal valid header
    self.content={}
    self.header={"version":"2.0",
                 "format":"ascii",
                 "class":className,
                 "object":objectName}
150
153 for number, name in enumerate(names):
154 setattr(self, name, number)
155
156 inputModes=Enumerate(["merge","error","warn","protect","overwrite","default"])
157
159 """Class that parses a string that contains the contents of an
160 OpenFOAM-file and builds a nested structure of directories and
161 lists from it"""
162
def __init__(self,
             content,
             fName=None,
             debug=False,
             noHeader=False,
             noBody=False,
             doMacroExpansion=False,
             boundaryDict=False,
             preserveComments=True,
             preserveNewlines=True,
             listDict=False,
             listDictWithHeader=False,
             listLengthUnparsed=None,
             binaryMode=False,
             duplicateCheck=False,
             noVectorOrTensor=False,
             duplicateFail=True):
    """@param content: the string to be parsed
    @param fName: Name of the actual file (if any); used to resolve #include
    @param debug: output debug information during parsing
    @param noHeader: switch that turns off the parsing of the header
    @param noBody: only the header is parsed, the body is skipped
    @param doMacroExpansion: expand #include and $var while parsing
    @param boundaryDict: the content is a polyMesh/boundary-style file
    @param preserveComments: collect comments as 'decorations'
    @param preserveNewlines: whether newlines belong to the decorations
    (stored as preserveNewLines; usage not visible in this chunk)
    @param listDict: the content is a plain list
    @param listDictWithHeader: the content is a list plus a FoamFile-header
    @param listLengthUnparsed: lists of at least this length are kept unparsed
    @param binaryMode: parse long lists in binary mode (overridden by the
    'format' entry of the header)
    @param duplicateCheck: Check for duplicates in dictionaries
    @param duplicateFail: Fail if a duplicate is discovered"""

    self.binaryMode=binaryMode
    self.fName=fName
    # results of the parse: body data and (optional) FoamFile-header
    self.data=None
    self.header=None
    self.debug=debug
    self.listLengthUnparsed=listLengthUnparsed
    self.doMacros=doMacroExpansion
    self.preserveComments=preserveComments
    self.preserveNewLines=preserveNewlines
    self.duplicateCheck=duplicateCheck
    self.duplicateFail=duplicateFail
    self.noVectorOrTensor=noVectorOrTensor


    # comment-collection is switched on once the header was seen (or right
    # away for header-less input, see below)
    self.collectDecorations=False
    self.inputMode=inputModes.merge

    self._decorationBuffer=""

    # exactly one grammar start symbol may be selected below
    startCnt=0

    # stack of currently open dictionaries; searched for $var substitutions
    self.dictStack=[DictProxy()]

    if noBody:
        self.start='noBody'
        startCnt+=1

    if noHeader:
        self.start='noHeader'
        startCnt+=1
        self.collectDecorations=True

    if listDict:
        self.start='pureList'
        startCnt+=1
        self.dictStack=[]
        self.collectDecorations=True

    if listDictWithHeader:
        self.start='pureListWithHeader'
        startCnt+=1

    if boundaryDict:
        self.start='boundaryDict'
        startCnt+=1

    if startCnt>1:
        error("Only one start symbol can be specified.",startCnt,"are specified")

    PlyParser.__init__(self,debug=debug)

    # counter used to generate keys for key-less dictionary lines (macros)
    self.emptyCnt=0

    # parse() returns the (header,data)-pair produced by the start symbol
    self.header,self.data=self.parse(content)
244
246 return key in self.data
247
249 return self.data[key]
250
253
256
258 for key in self.data:
259 yield key
260
261
262
263
264
265
266
268 self._decorationBuffer=""
269
271 if self.collectDecorations:
272 self._decorationBuffer+=text
273
277
281
283 tmp=self._decorationBuffer
284 self.resetDecoration()
285 if len(tmp)>0:
286 if tmp[-1]=='\n':
287 tmp=tmp[:-1]
288 return tmp
289
def directory(self):
    """Return the directory the parsed file lives in (used to resolve
    #include paths); the current directory if no file name is known

    @return: directory part of the file name, or os.path.curdir"""
    # fixed: identity comparison with None (was 'self.fName==None')
    if self.fName is None:
        return path.curdir
    else:
        return path.dirname(self.fName)
295
297 """ Get the data structure"""
298 return self.data
299
301 """ Get the OpenFOAM-header"""
302 return self.header
303
def printContext(self,c,ind):
    """Prints the context of the current index (roughly 100 characters
    before and after it) to stdout, to help locating parser errors

    @param c: the complete text that was parsed
    @param ind: index into c around which the context is printed"""
    print "------"
    print c[max(0,ind-100):max(0,ind-1)]
    print "------"
    # the character the parser actually choked on
    print ">",c[ind-1],"<"
    print "------"
    print c[min(len(c),ind):min(len(c),ind+100)]
    print "------"
313
315 """Prints the error message of the parser and exit"""
316 print "PARSER ERROR:",text
317 print "On index",ind
318 self.printContext(c,ind)
319 raise PyFoamParserError("Unspecified")
320
322 """Checks whether this list is a list that consists only of prefix-Lists"""
323 isAllPreList=False
324 if (len(orig) % 2)==0:
325 isAllPreList=True
326 for i in range(0,len(orig),2):
327 if type(orig[i])==int and (type(orig[i+1]) in [list,Vector,Tensor,SymmTensor]):
328 if len(orig[i+1])!=orig[i]:
329 isAllPreList=False
330 break
331 else:
332 isAllPreList=False
333 break
334
335 if isAllPreList:
336 return orig[1::2]
337 else:
338 return orig
339
# Names of all tokens the PLY-lexer may produce (from the t_* rules and
# the reserved words below)
tokens = (
    'NAME',
    'ICONST',
    'FCONST',
    'SCONST',
    'FOAMFILE',
    'UNIFORM',
    'NONUNIFORM',
    'UNPARSEDCHUNK',
    'CODESTREAMCHUNK',
    'REACTION',
    'SUBSTITUTION',
    'MERGE',
    'OVERWRITE',
    'ERROR',
    'WARN',
    'PROTECT',
    'DEFAULT',
    'INCLUDE',
    'INCLUDEIFPRESENT',
    'REMOVE',
    'INPUTMODE',
    'KANALGITTER',
    'CODESTART',
    'CODEEND',
    )

# Words with special meaning: promoted from NAME to their own token type
reserved = {
    'FoamFile' : 'FOAMFILE',
    'uniform' : 'UNIFORM',
    'nonuniform' : 'NONUNIFORM',
    'include' : 'INCLUDE',
    'includeIfPresent': 'INCLUDEIFPRESENT',
    'remove' : 'REMOVE',
    'inputMode' : 'INPUTMODE',
    'merge' : 'MERGE',
    'overwrite' : 'OVERWRITE',
    'error' : 'ERROR',
    'warn' : 'WARN',
    'protect' : 'PROTECT',
    'default' : 'DEFAULT',
    }

# Exclusive lexer states:
#   unparsed   - skim over long lists without tokenizing their contents
#   codestream - collect the raw text between #{ and #}
#   mlcomment  - inside a multi-line comment (its rules are not visible here)
states = (
    ('unparsed', 'exclusive'),
    ('codestream', 'exclusive'),
    ('mlcomment', 'exclusive'),
    )
388
390 r'\('
391 t.lexer.level+=1
392
393
395 r'\)'
396 t.lexer.level-=1
397
398 if t.lexer.level < 0 :
399 t.value = t.lexer.lexdata[t.lexer.code_start:t.lexer.lexpos-1]
400
401 t.lexer.lexpos-=1
402 t.type = "UNPARSEDCHUNK"
403 t.lexer.lineno += t.value.count('\n')
404 t.lexer.begin('INITIAL')
405 return t
406
407 t_unparsed_ignore = ' \t\n0123456789.-+e'
408
410 print "Error",t.lexer.lexdata[t.lexer.lexpos]
411 t.lexer.skip(1)
412
414 r"\#\}"
415 t.value = t.lexer.lexdata[t.lexer.code_start:t.lexer.lexpos-2]
416 t.lexer.lexpos-=2
417 t.type = "CODESTREAMCHUNK"
418 t.lexer.lineno += t.value.count('\n')
419 t.lexer.begin('INITIAL')
420 return t
421
422 t_codestream_ignore = ''
423
427
429 print "Error",t.lexer.lexdata[t.lexer.lexpos]
430 t.lexer.skip(1)
431
442
444 r'\$[a-zA-Z_][+\-<>(),.\*|a-zA-Z_0-9&%:]*'
445 t.type=self.reserved.get(t.value,'SUBSTITUTION')
446 if t.value[-1]==")":
447 if t.value.count(")")>t.value.count("("):
448
449 t.value=t.value[:-1]
450 t.lexer.lexpos-=1
451
452 return t
453
t_CODESTART = r'\#\{'     # opening delimiter of a codestream block

t_CODEEND = r'\#\}'       # closing delimiter of a codestream block

t_KANALGITTER = r'\#'     # the '#' that introduces directives (#include etc)

t_ICONST = r'(-|)\d+([uU]|[lL]|[uU][lL]|[lL][uU])?'   # integer constants

t_FCONST = r'(-|)((\d+)(\.\d*)(e(\+|-)?(\d+))? | (\d+)e(\+|-)?(\d+))([lL]|[fF])?'   # float constants

t_SCONST = r'\"([^\\\n]|(\\.))*?\"'   # double-quoted string constants

literals = "(){};[]"      # single-character tokens passed through verbatim

t_ignore=" \t\r"          # whitespace skipped between tokens (newlines have their own rule)
469
470
472 r'\n+'
473 t.lexer.lineno += len(t.value)
474 now=t.lexer.lexpos
475 next=t.lexer.lexdata.find('\n',now)
476 if next>=0:
477 line=t.lexer.lexdata[now:next]
478 pos=line.find("=")
479 if pos>=0:
480 if ((line.find("//")>=0 and line.find("//")<pos)) or (line.find("/*")>=0 and line.find("/*")<pos) or (line.find('"')>=0 and line.find('"')<pos):
481 return
482 t.value = line
483 t.type = "REACTION"
484 t.lexer.lineno += 1
485 t.lexer.lexpos = next
486 return t
487
488
489
494
500
504
513
517
518 t_mlcomment_ignore = ''
519
524
525
528
529
531 'global : header dictbody'
532 p[0] = ( p[1] , p[2] )
533
535 'gotHeader :'
536 p.lexer.lexpos=len(p.lexer.lexdata)
537
def p_noBody(self,p):
    # NOTE: the docstring is the PLY grammar rule - it is semantic, not
    # documentation, and must stay exactly as it is
    ''' noBody : FOAMFILE '{' dictbody gotHeader '}' '''
    # only the header dictionary (p[3]) is of interest; the body is never
    # read because gotHeader moves the lexer to the end of the input
    p[0] = ( p[3] , {} )
541
543 'noHeader : dictbody'
544 p[0] = ( None , p[1] )
545
547 'pureList : list'
548 p[0] = ( None , p[1] )
549
551 '''pureListWithHeader : header list
552 | header prelist '''
553 p[0] = ( p[1] , p[2] )
554
556 '''boundaryDict : header list
557 | header prelist '''
558
559 p[0] = ( p[1] , p[2] )
560
562 'header : FOAMFILE dictionary'
563 p[0] = p[2]
564 if p[0]["format"]=="binary":
565 self.binaryMode=True
566 raise FatalErrorPyFoamException("Can not parse binary files. It is not implemented")
567 elif p[0]["format"]=="ascii":
568 self.binaryMode=False
569 else:
570 raise FatalErrorPyFoamException("Don't know how to parse file format",p[0]["format"])
571 self.collectDecorations=True
572
574 '''macro : KANALGITTER include
575 | KANALGITTER inputMode
576 | KANALGITTER remove'''
577 p[0] = p[1]+p[2]+"\n"
578 if self.doMacros:
579 p[0]="// "+p[0]
580
582 '''include : INCLUDE SCONST
583 | INCLUDEIFPRESENT SCONST'''
584 if self.doMacros:
585 fName=path.join(self.directory(),p[2][1:-1])
586 read=True
587 if p[1]=="includeIfPresent" and not path.exists(fName):
588 read=False
589 if read and not path.exists(fName):
590 raise PyFoamParserError("The included file "+fName+" does not exist")
591 if read:
592 data=ParsedParameterFile(fName,noHeader=True)
593 into=self.dictStack[-1]
594 for k in data:
595 into[k]=data[k]
596
597 p[0] = p[1] + " " + p[2]
598
608
610 '''remove : REMOVE word
611 | REMOVE wlist'''
612 p[0] = p[1] + " "
613 if type(p[2])==str:
614 p[0]+=p[2]
615 else:
616 p[0]+="( "
617 for w in p[2]:
618 p[0]+=w+" "
619 p[0]+=")"
620
622 '''integer : ICONST'''
623 p[0] = int(p[1])
624
626 '''integer : FCONST'''
627 p[0] = float(p[1])
628
632
634 '''exit_dict :'''
635 p[0]=self.dictStack.pop()
636
638 '''dictionary : '{' enter_dict dictbody '}' exit_dict
639 | '{' '}' '''
640 if len(p)==6:
641 p[0] = p[5]
642 else:
643 p[0] = DictProxy()
644
def p_dictbody(self,p):
    # NOTE: the docstring is the PLY grammar rule - do not edit it
    '''dictbody : dictbody dictline
                | dictline
                | empty'''

    # Each dictline reduces to a (key,value)-tuple. Two cases:
    #  - len(p)==3: extend an already existing dictbody (p[1]) by p[2]
    #  - otherwise: first line of the body; start from the dictionary
    #    currently open on the stack
    if len(p)==3:
        p[0]=p[1]
        if self.duplicateCheck:
            if p[2][0] in p[0]:
                if self.duplicateFail:
                    error("Key",p[2][0],"already defined")
                else:
                    warning("Key",p[2][0],"already defined")
        # a DictRedirection key with an empty value is a lone $var line
        # that pulls in another dictionary, not a regular entry
        if type(p[2][0])==DictRedirection and p[2][1]=='':
            p[0].addRedirection(p[2][0])
        else:
            if type(p[2][1])==DictRedirection:
                p[0][p[2][0]]=p[2][1].getContent()
            else:
                p[0][p[2][0]]=p[2][1]
            # attach the comments collected so far to this key
            p[0].addDecoration(p[2][0],self.getDecoration())
    else:
        p[0]=self.dictStack[-1]

        if p[1]:
            if type(p[1][0])==DictRedirection and p[1][1]=='':
                p[0].addRedirection(p[1][0])
            else:
                if type(p[1][1])==DictRedirection:
                    p[0][p[1][0]]=p[1][1].getContent()
                else:
                    p[0][p[1][0]]=p[1][1]
                # NOTE(review): unlike the branch above, no decoration is
                # added here - possibly intentional, possibly an oversight
678
697
699 '''wlist : '(' wordlist ')' '''
700 p[0] = p[2]
701
703 '''unparsed : UNPARSEDCHUNK'''
704 p[0] = Unparsed(p[1])
705
707 '''prelist_seen : '''
708 if self.listLengthUnparsed!=None:
709 if int(p[-1])>=self.listLengthUnparsed:
710 p.lexer.begin('unparsed')
711 p.lexer.level=0
712 p.lexer.code_start = p.lexer.lexpos
713
715 '''codestream : codeSeen CODESTART CODESTREAMCHUNK CODEEND '''
716 p[0] = Codestream(p[3])
717
719 '''codeSeen : '''
720 p.lexer.begin('codestream')
721 p.lexer.level=0
722 p.lexer.code_start = p.lexer.lexpos
723
725 '''prelist : integer prelist_seen '(' itemlist ')'
726 | integer prelist_seen '(' unparsed ')' '''
727 if type(p[4])==Unparsed:
728 p[0] = UnparsedList(int(p[1]),p[4].data)
729 else:
730 p[0] = self.condenseAllPreFixLists(p[4])
731
733 '''itemlist : itemlist item
734 | itemlist ';'
735 | item '''
736 if len(p)==2:
737 if p[1]==None:
738 p[0]=[]
739 else:
740 p[0]=[ p[1] ]
741 else:
742 p[0]=p[1]
743 if p[2]!=';':
744 p[0].append(p[2])
745
747 '''wordlist : wordlist word
748 | word '''
749 if len(p)==2:
750 if p[1]==None:
751 p[0]=[]
752 else:
753 p[0]=[ p[1] ]
754 else:
755 p[0]=p[1]
756 p[0].append(p[2])
757
759 '''word : NAME
760 | UNIFORM
761 | NONUNIFORM
762 | MERGE
763 | OVERWRITE
764 | DEFAULT
765 | WARN
766 | PROTECT
767 | ERROR'''
768 p[0]=p[1]
769
771 '''substitution : SUBSTITUTION'''
772 if self.doMacros:
773 nm=p[1][1:]
774 p[0]="<Symbol '"+nm+"' not found>"
775 for di in reversed(self.dictStack):
776 if nm in di:
777 p[0]=DictRedirection(deepcopy(di[nm]),
778 di[nm],
779 nm)
780 return
781 else:
782 p[0]=p[1]
783
785 '''dictkey : word
786 | SCONST'''
787 p[0]=p[1]
788
790 '''dictline : dictkey dictitem ';'
791 | dictkey list ';'
792 | dictkey prelist ';'
793 | dictkey fieldvalue ';'
794 | macro
795 | substitution ';'
796 | dictkey codestream ';'
797 | dictkey dictionary'''
798 if len(p)==4 and type(p[2])==list:
799
800 doAgain=True
801 tmp=p[2]
802 while doAgain:
803 doAgain=False
804 for i in range(len(tmp)-1):
805 if type(tmp[i])==int and type(tmp[i+1]) in [list]:
806 if tmp[i]==len(tmp[i+1]):
807 nix=tmp[:i]+tmp[i+1:]
808 for i in range(len(tmp)):
809 tmp.pop()
810 tmp.extend(nix)
811 doAgain=True
812 break
813 if len(p)==4:
814 p[0] = ( p[1] , p[2] )
815 elif len(p)==3:
816 if p[2]==';':
817 p[0]= (p[1],'')
818 else:
819 p[0] = ( p[1] , p[2] )
820 else:
821 p[0] = ( self.emptyCnt , p[1] )
822 self.emptyCnt+=1
823
825 '''number : integer
826 | FCONST'''
827 p[0] = p[1]
828
830 '''dimension : '[' number number number number number number number ']'
831 | '[' number number number number number ']' '''
832 result=p[2:-1]
833 if len(result)==5:
834 result+=[0,0]
835
836 p[0]=apply(Dimension,result)
837
839 '''vector : '(' number number number ')' '''
840 if self.noVectorOrTensor:
841 p[0]=p[2:5]
842 else:
843 p[0]=apply(Vector,p[2:5])
844
846 '''tensor : '(' number number number number number number number number number ')' '''
847 if self.noVectorOrTensor:
848 p[0]=p[2:11]
849 else:
850 p[0]=apply(Tensor,p[2:11])
851
853 '''symmtensor : '(' number number number number number number ')' '''
854 if self.noVectorOrTensor:
855 p[0]=p[2:8]
856 else:
857 p[0]=apply(SymmTensor,p[2:8])
858
865
870
872 '''dictitem : longitem
873 | pitem'''
874 if type(p[1])==tuple:
875 if len(p[1])==2 and p[1][0]=="uniform":
876 p[0]=Field(p[1][1])
877 elif len(p[1])==3 and p[1][0]=="nonuniform":
878 p[0]=Field(p[1][2],name=p[1][1])
879 else:
880 p[0]=TupleProxy(p[1])
881 else:
882 p[0] = p[1]
883
885 '''longitem : pitemlist pitem'''
886 p[0] = p[1]+(p[2],)
887
889 '''pitemlist : pitemlist pitem
890 | pitem '''
891 if len(p)==2:
892 p[0]=(p[1],)
893 else:
894
895
896
897 p[0]=p[1]+(p[2],)
898
900 '''pitem : word
901 | SCONST
902 | number
903 | dictionary
904 | list
905 | dimension
906 | substitution
907 | empty'''
908 p[0] = p[1]
909
911 '''item : pitem
912 | REACTION
913 | list
914 | dictionary'''
915 p[0] = p[1]
916
920
923
924
925
931
933 result="Error in PyFoamParser: '"+self.descr+"'"
934 if self.data!=None:
935 val=self.data.value
936 if len(val)>100:
937 val=val[:40]+" .... "+val[-40:]
938
939 result+=" @ %r (Type: %s ) in line %d at position %d" % (val,
940 self.data.type,
941 self.data.lineno,
942 self.data.lexpos)
943
944 return result
945
948
950 """Convenience class that parses only a headerless OpenFOAM dictionary"""
951
def __init__(self,
             content,
             debug=False,
             noVectorOrTensor=False,
             duplicateCheck=False,
             listDict=False,
             doMacroExpansion=False,
             duplicateFail=False):
    """@param content: the string to be parsed
    @param debug: output debug information during parsing
    @param noVectorOrTensor: short lists are not turned into vectors/tensors
    @param duplicateCheck: check for duplicate dictionary keys
    @param listDict: the string contains only a list
    @param doMacroExpansion: expand #include and $var
    @param duplicateFail: fail (instead of only warning) on duplicates"""

    # a plain dictionary string has no FoamFile-header; when listDict is
    # set the pureList start symbol takes over instead
    FoamFileParser.__init__(self,
                            content,
                            debug=debug,
                            noHeader=not listDict,
                            boundaryDict=False,
                            listDict=listDict,
                            noVectorOrTensor=noVectorOrTensor,
                            duplicateCheck=duplicateCheck,
                            doMacroExpansion=doMacroExpansion,
                            duplicateFail=duplicateFail)
973
976
978 """Convenience class that parses only a OpenFOAM polyMesh-boundaries file"""
979
980 - def __init__(self,name,backup=False,debug=False):
985
def parse(self,content):
    """Constructs a representation of the file

    The parsed boundary file is an alternating sequence of patch names
    and patch dictionaries; fold it into one dictionary keyed by name"""
    plain=ParsedParameterFile.parse(self,content)
    self.content={}
    for idx in range(0,len(plain),2):
        patchName=plain[idx]
        self.content[patchName]=plain[idx+1]
    return self.content
993
995 string="// File generated by PyFoam - sorry for the ugliness\n\n"
996 temp=[]
997 for k,v in self.content.iteritems():
998 temp.append((k,v))
999
1000 temp.sort(lambda x,y:cmp(int(x[1]["startFace"]),int(y[1]["startFace"])))
1001
1002 temp2=[]
1003
1004 for b in temp:
1005 temp2.append(b[0])
1006 temp2.append(b[1])
1007
1008 generator=FoamFileGenerator(temp2,header=self.header)
1009 string+=str(generator)
1010
1011 return string
1012
1014 """Only parse the header of a file"""
1015
1018
1020 return self.header[name]
1021
1023 return name in self.header
1024
1026 return len(self.header)
1027