1
2 """Parameter file is read into memory and modified there"""
3
4 from PyFoam.RunDictionary.FileBasis import FileBasisBackup
5 from PyFoam.Basics.PlyParser import PlyParser
6 from PyFoam.Basics.FoamFileGenerator import FoamFileGenerator
7
8 from PyFoam.Basics.DataStructures import Vector,Field,Dimension,DictProxy,TupleProxy,Tensor,SymmTensor,Unparsed,UnparsedList,Codestream,DictRedirection,BinaryBlob,BinaryList,BoolProxy
9
10 from PyFoam.Error import error,warning,FatalErrorPyFoamException
11
12 from os import path
13 from copy import deepcopy
14 import sys
15
16 from PyFoam.ThirdParty.six import print_,integer_types,iteritems
17
18 class ParsedParameterFile(FileBasisBackup):
19     """Parameter file whose complete representation is read into
20     memory, can be manipulated and afterwards written to disk"""
21
22     def __init__(self,
23 name,
24 backup=False,
25 debug=False,
26 boundaryDict=False,
27 listDict=False,
28 listDictWithHeader=False,
29 listLengthUnparsed=None,
30 preserveComments=True,
31 noHeader=False,
32 binaryMode=False,
33 treatBinaryAsASCII=False,
34 noBody=False,
35 doMacroExpansion=False,
36 dontRead=False,
37 noVectorOrTensor=False,
38 dictStack=None,
39 createZipped=True,
40 longListOutputThreshold=20):
41 """@param name: The name of the parameter file
42 @param backup: create a backup-copy of the file
43 @param boundaryDict: the file to parse is a boundary file
44 @param listDict: the file only contains a list
45 @param listDictWithHeader: the file only contains a list and a header
46 @param listLengthUnparsed: Lists longer than that length are not parsed
47 @param binaryMode: Parse long lists in binary mode (to be overridden by
48 the settings in the header).
49 @param treatBinaryAsASCII: even if the header says that this is a
50 binary file treat it like an ASCII-file
51 @param noHeader: don't expect a header
52 @param noBody: don't read the body of the file (only the header)
53 @param doMacroExpansion: expand #include and $var
54            @param noVectorOrTensor: short lists of length 3, 6 and 9 are NOT
55            interpreted as vectors or tensors
56 @param dontRead: Do not read the file during construction
57 @param longListOutputThreshold: Lists that are longer than this are
58 prefixed with a length
59 @param dictStack: dictionary stack for lookup (only used for include)
60 """
61
62 self.noHeader=noHeader
63 self.noBody=noBody
64 FileBasisBackup.__init__(self,
65 name,
66 backup=backup,
67 createZipped=createZipped)
68 self.debug=debug
69 self.boundaryDict=boundaryDict
70 self.listDict=listDict
71 self.listDictWithHeader=listDictWithHeader
72 self.listLengthUnparsed=listLengthUnparsed
73 self.doMacros=doMacroExpansion
74 self.preserveComments=preserveComments
75 self.noVectorOrTensor=noVectorOrTensor
76 self.header=None
77 self.content=None
78 self.longListOutputThreshold=longListOutputThreshold
79 self.binaryMode=binaryMode
80 self.treatBinaryAsASCII=treatBinaryAsASCII
81 self.lastDecoration=""
82 self.dictStack=dictStack
83
84 if not dontRead:
85 self.readFile()
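    # Usage sketch (illustrative assumption; the file path, keys and values below are
    # hypothetical and not taken from this module):
    #
    #   from PyFoam.RunDictionary.ParsedParameterFile import ParsedParameterFile
    #
    #   ppf = ParsedParameterFile("system/controlDict", backup=True)
    #   ppf["endTime"] = 1000        # modify the in-memory representation
    #   ppf.writeFile()              # write it back to disk (inherited from FileBasis)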
86
87     def parse(self,content):
88         """Constructs a representation of the file"""
89 try:
90 parser=FoamFileParser(content,
91 debug=self.debug,
92 fName=self.name,
93 boundaryDict=self.boundaryDict,
94 listDict=self.listDict,
95 listDictWithHeader=self.listDictWithHeader,
96 listLengthUnparsed=self.listLengthUnparsed,
97 noHeader=self.noHeader,
98 noBody=self.noBody,
99 preserveComments=self.preserveComments,
100 binaryMode=self.binaryMode,
101 treatBinaryAsASCII=self.treatBinaryAsASCII,
102 noVectorOrTensor=self.noVectorOrTensor,
103 dictStack=self.dictStack,
104 doMacroExpansion=self.doMacros)
105 except BinaryParserError:
106 e = sys.exc_info()[1]
107 if not self.treatBinaryAsASCII:
108
109 parser=FoamFileParser(content,
110 debug=self.debug,
111 fName=self.name,
112 boundaryDict=self.boundaryDict,
113 listDict=self.listDict,
114 listDictWithHeader=self.listDictWithHeader,
115 listLengthUnparsed=self.listLengthUnparsed,
116 noHeader=self.noHeader,
117 noBody=self.noBody,
118 preserveComments=self.preserveComments,
119 binaryMode=self.binaryMode,
120 treatBinaryAsASCII=True,
121 noVectorOrTensor=self.noVectorOrTensor,
122 dictStack=self.dictStack,
123 doMacroExpansion=self.doMacros)
124 else:
125 raise e
126
127 self.content=parser.getData()
128 self.header=parser.getHeader()
129 self.lastDecoration=parser._decorationBuffer
130
131 return self.content
132
133     def __contains__(self,key):
134         return key in self.content
135
136     def __getitem__(self,key):
137         return self.content[key]
138
139     def __setitem__(self,key,value):
140         self.content[key]=value
141
142     def __delitem__(self,key):
143         del self.content[key]
144
145     def __len__(self):
146         return len(self.content)
147
148     def __iter__(self):
149         for key in self.content:
150 yield key
151
152     def __str__(self):
153         """Generates a string from the contents in memory.
154         Used to be called makeString"""
155
156 string="// -*- C++ -*-\n// File generated by PyFoam - sorry for the ugliness\n\n"
157
158 generator=FoamFileGenerator(self.content,
159 header=self.header if not self.noHeader else None,
160 longListThreshold=self.longListOutputThreshold)
161 string+=generator.makeString(firstLevel=True)
162
163 if len(self.lastDecoration)>0:
164 string+="\n\n"+self.lastDecoration
165
166 return string
167
169         """Get a dictionary of the values, with the decorators removed"""
170 result={}
171 if self.content:
172 for k in self.content:
173 if type(k) not in integer_types:
174 result[k]=self.content[k]
175 return result
176
177 class WriteParameterFile(ParsedParameterFile):
178     """A specialization that is only used for writing to the file"""
179     def __init__(self,
180 name,
181 backup=False,
182 className="dictionary",
183 objectName=None,
184 createZipped=False,
185 **kwargs):
186 ParsedParameterFile.__init__(self,
187 name,
188 backup=backup,
189 dontRead=True,
190 createZipped=createZipped,
191 **kwargs)
192
193 if objectName==None:
194 objectName=path.basename(name)
195
196 self.content=DictProxy()
197 self.header={"version":"2.0",
198 "format":"ascii",
199 "class":className,
200 "object":objectName}
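    # Usage sketch (illustrative assumption; the file name and entry are hypothetical):
    #
    #   from PyFoam.RunDictionary.ParsedParameterFile import WriteParameterFile
    #
    #   w = WriteParameterFile("constant/transportProperties", className="dictionary")
    #   w["nu"] = 1e-06
    #   w.writeFile()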
201
202 class Enumerate(object):
203     def __init__(self, names):
204         for number, name in enumerate(names):
205             setattr(self, name, number)
206
207 inputModes=Enumerate(["merge","error","warn","protect","overwrite","default"])
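# Note (derived from Enumerate.__init__ above): each name is mapped to its index,
# so e.g. inputModes.merge == 0 and inputModes.default == 5.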
208
209 class FoamFileParser(PlyParser):
210     """Class that parses a string that contains the contents of an
211     OpenFOAM-file and builds a nested structure of dictionaries and
212     lists from it"""
213
214     def __init__(self,
215 content,
216 fName=None,
217 debug=False,
218 noHeader=False,
219 noBody=False,
220 doMacroExpansion=False,
221 boundaryDict=False,
222 preserveComments=True,
223 preserveNewlines=True,
224 listDict=False,
225 listDictWithHeader=False,
226 listLengthUnparsed=None,
227 binaryMode=False,
228 treatBinaryAsASCII=False,
229 duplicateCheck=False,
230 noVectorOrTensor=False,
231 dictStack=None,
232 duplicateFail=True):
233 """@param content: the string to be parsed
234 @param fName: Name of the actual file (if any)
235 @param debug: output debug information during parsing
236 @param noHeader: switch that turns off the parsing of the header
237 @param duplicateCheck: Check for duplicates in dictionaries
238 @param duplicateFail: Fail if a duplicate is discovered"""
239
240 self.binaryMode=binaryMode
241 self.treatBinaryAsASCII=treatBinaryAsASCII
242 self.fName=fName
243 self.data=None
244 self.header=None
245 self.debug=debug
246 self.listLengthUnparsed=listLengthUnparsed
247 self.doMacros=doMacroExpansion
248 self.preserveComments=preserveComments
249 self.preserveNewLines=preserveNewlines
250 self.duplicateCheck=duplicateCheck
251 self.duplicateFail=duplicateFail
252 self.noVectorOrTensor=noVectorOrTensor
253 self.inHeader=True
254 self.inBinary=False
255
256
257 self.collectDecorations=False
258 self.inputMode=inputModes.merge
259
260 self._decorationBuffer=""
261
262 startCnt=0
263
264 self.dictStack=dictStack
265 if self.dictStack==None:
266 self.dictStack=[DictProxy()]
267
268 if noBody:
269 self.start='noBody'
270 startCnt+=1
271
272 if noHeader:
273 self.inHeader=False
274 self.start='noHeader'
275 startCnt+=1
276 self.collectDecorations=True
277
278 if listDict:
279 self.inHeader=False
280 self.start='pureList'
281 startCnt+=1
282 self.dictStack=[]
283 self.collectDecorations=True
284
285 if listDictWithHeader:
286 self.start='pureListWithHeader'
287 startCnt+=1
288
289 if boundaryDict:
290 self.start='boundaryDict'
291 startCnt+=1
292
293 if startCnt>1:
294 error("Only one start symbol can be specified.",startCnt,"are specified")
295
296 PlyParser.__init__(self,debug=debug)
297
298
299
300
301 self.emptyCnt=0
302
303 self.header,self.data=self.parse(content)
304
305     def __contains__(self,key):
306         return key in self.data
307
308     def __getitem__(self,key):
309         return self.data[key]
310
313
316
317     def __iter__(self):
318         for key in self.data:
319 yield key
320
321
322
323
324
325
326
327     def resetDecoration(self):
328         self._decorationBuffer=""
329
331 if self.collectDecorations:
332 self._decorationBuffer+=text
333
337
341
342     def getDecoration(self):
343         tmp=self._decorationBuffer
344 self.resetDecoration()
345 if len(tmp)>0:
346 if tmp[-1]=='\n':
347 tmp=tmp[:-1]
348 return tmp
349
350     def directory(self):
351         if self.fName==None:
352 return path.curdir
353 else:
354 return path.dirname(self.fName)
355
356     def getData(self):
357         """ Get the data structure"""
358 return self.data
359
360     def getHeader(self):
361         """ Get the OpenFOAM-header"""
362 return self.header
363
364     def printContext(self,c,ind):
365 """Prints the context of the current index"""
366 print_("------")
367 print_(c[max(0,ind-100):max(0,ind-1)])
368 print_("------")
369 print_(">",c[ind-1],"<")
370 print_("------")
371 print_(c[min(len(c),ind):min(len(c),ind+100)])
372 print_("------")
373
380
381     def condenseAllPreFixLists(self,orig):
382         """Checks whether this list consists only of prefixed lists (a length followed by a list of that length) and if so strips the length prefixes"""
383 isAllPreList=False
384 if (len(orig) % 2)==0:
385 isAllPreList=True
386 for i in range(0,len(orig),2):
387 if type(orig[i])==int and (type(orig[i+1]) in [list,Vector,Tensor,SymmTensor]):
388 if len(orig[i+1])!=orig[i]:
389 isAllPreList=False
390 break
391 else:
392 isAllPreList=False
393 break
394
395 if isAllPreList:
396 return orig[1::2]
397 else:
398 return orig
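    # Example (derived from condenseAllPreFixLists above): an alternating sequence of
    # lengths and lists such as [2, [0, 1], 2, [2, 3]] is condensed to [[0, 1], [2, 3]];
    # anything that does not follow that pattern is returned unchanged.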
399
400 tokens = (
401 'NAME',
402 'ICONST',
403 'FCONST',
404 'SCONST',
405 'FOAMFILE',
406 'UNIFORM',
407 'NONUNIFORM',
408 'UNPARSEDCHUNK',
409 'CODESTREAMCHUNK',
410 'REACTION',
411 'SUBSTITUTION',
412 'MERGE',
413 'OVERWRITE',
414 'ERROR',
415 'WARN',
416 'PROTECT',
417 'DEFAULT',
418 'INCLUDE',
419 'INCLUDEIFPRESENT',
420 'REMOVE',
421 'INPUTMODE',
422 'KANALGITTER',
423 'CODESTART',
424 'CODEEND',
425 'BINARYBLOB',
426 )
427
428 reserved = {
429 'FoamFile' : 'FOAMFILE',
430 'uniform' : 'UNIFORM',
431 'nonuniform' : 'NONUNIFORM',
432 'include' : 'INCLUDE',
433 'includeIfPresent': 'INCLUDEIFPRESENT',
434 'remove' : 'REMOVE',
435 'inputMode' : 'INPUTMODE',
436 'merge' : 'MERGE',
437 'overwrite' : 'OVERWRITE',
438 'error' : 'ERROR',
439 'warn' : 'WARN',
440 'protect' : 'PROTECT',
441 'default' : 'DEFAULT',
442 }
443
444 states = (
445 ('unparsed', 'exclusive'),
446 ('codestream', 'exclusive'),
447 ('mlcomment', 'exclusive'),
448 ('binaryblob', 'exclusive'),
449 )
450
452 r'\('
453 t.lexer.level+=1
454
455
457 r'\)'
458 t.lexer.level-=1
459
460 if t.lexer.level < 0 :
461 t.value = t.lexer.lexdata[t.lexer.code_start:t.lexer.lexpos-1]
462
463 t.lexer.lexpos-=1
464 t.type = "UNPARSEDCHUNK"
465 t.lexer.lineno += t.value.count('\n')
466 t.lexer.begin('INITIAL')
467 return t
468
469 t_unparsed_ignore = ' \t\n0123456789.-+e'
470
474
475 t_binaryblob_ignore = ''
476
478 r"\)"
479 size=t.lexer.lexpos-t.lexer.binary_start-1
480
481
482
483 if (size % t.lexer.binary_listlen)==0:
484
485 nextChar=t.lexer.lexdata[t.lexer.lexpos]
486 nextNextChar=t.lexer.lexdata[t.lexer.lexpos+1]
487 if (nextChar in [';','\n'] and nextNextChar=='\n'):
488 t.value = t.lexer.lexdata[t.lexer.binary_start:t.lexer.lexpos-1]
489 assert(len(t.value)%t.lexer.binary_listlen == 0)
490 t.lexer.lexpos-=1
491 t.type = "BINARYBLOB"
492 t.lexer.lineno += t.value.count('\n')
493 t.lexer.begin('INITIAL')
494 self.inBinary=False
495 return t
496
500
504
506 r"\#\}"
507 t.value = t.lexer.lexdata[t.lexer.code_start:t.lexer.lexpos-2]
508 t.lexer.lexpos-=2
509 t.type = "CODESTREAMCHUNK"
510 t.lexer.lineno += t.value.count('\n')
511 t.lexer.begin('INITIAL')
512 return t
513
514 t_codestream_ignore = ''
515
519
523
534
536 r'\$[a-zA-Z_.:{][+\-<>(),.\*|a-zA-Z_0-9&%:${}]*'
537 t.type=self.reserved.get(t.value,'SUBSTITUTION')
538 if t.value[-1]==")":
539 if t.value.count(")")>t.value.count("("):
540
541 t.value=t.value[:-1]
542 t.lexer.lexpos-=1
543
544 return t
545
546 t_CODESTART = r'\#\{'
547
548 t_CODEEND = r'\#\}'
549
550 t_KANALGITTER = r'\#'
551
552 t_ICONST = r'(-|)\d+([uU]|[lL]|[uU][lL]|[lL][uU])?'
553
554 t_FCONST = r'(-|)((\d+)(\.\d*)(e(\+|-)?(\d+))? | (\d+)e(\+|-)?(\d+))([lL]|[fF])?'
555
556 t_SCONST = r'\"([^\\\n]|(\\.))*?\"'
557
558 literals = "(){};[]"
559
560 t_ignore=" \t\r"
561
562
580
581
582
587
593
597
606
610
611 t_mlcomment_ignore = ''
612
617
618
625
626
627     def p_global(self,p):
628         'global : header dictbody'
629 p[0] = ( p[1] , p[2] )
630
631     def p_gotHeader(self,p):
632         'gotHeader :'
633 p.lexer.lexpos=len(p.lexer.lexdata)
634 self.inHeader=False
635
636     def p_noBody(self,p):
637 ''' noBody : FOAMFILE '{' dictbody gotHeader '}' '''
638 p[0] = ( p[3] , {} )
639
640     def p_noHeader(self,p):
641         'noHeader : dictbody'
642 p[0] = ( None , p[1] )
643
644     def p_pureList(self,p):
645         'pureList : onlyListOrPList'
646 p[0] = ( None , p[1] )
647
648     def p_onlyListOrPList(self,p):
649         '''onlyListOrPList : list
650 | prelist '''
651 p[0]=p[1]
652
653     def p_pureListWithHeader(self,p):
654         '''pureListWithHeader : header onlyListOrPList'''
655 p[0] = ( p[1] , p[2] )
656
657     def p_afterHeader(self,p):
658         'afterHeader :'
659 pass
660
661     def p_boundaryDict(self,p):
662         '''boundaryDict : header list
663 | header prelist '''
664
665 p[0] = ( p[1] , p[2] )
666
667     def p_header(self,p):
668         'header : FOAMFILE dictionary'
669 self.inHeader=False
670 p[0] = p[2]
671
672
673
674
675
676
677
678
679
680
681
682 self.collectDecorations=True
683
684     def p_macro(self,p):
685         '''macro : KANALGITTER include
686 | KANALGITTER inputMode
687 | KANALGITTER remove'''
688 p[0] = p[1]+p[2]+"\n"
689 if self.doMacros:
690 p[0]="// "+p[0]
691
692     def p_include(self,p):
693         '''include : INCLUDE SCONST
694 | INCLUDEIFPRESENT SCONST'''
695 if self.doMacros:
696 fName=path.join(self.directory(),p[2][1:-1])
697 read=True
698 if p[1]=="includeIfPresent" and not path.exists(fName):
699 read=False
700 if read and not path.exists(fName):
701 raise PyFoamParserError("The included file "+fName+" does not exist")
702 if read:
703 data=ParsedParameterFile(fName,
704 noHeader=True,
705 dictStack=self.dictStack,
706 doMacroExpansion=self.doMacros)
707 into=self.dictStack[-1]
708 for k in data:
709 into[k]=data[k]
710
711 p[0] = p[1] + " " + p[2]
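    # Note (derived from p_include above): with doMacroExpansion=True an
    #   #include "fileName"
    # directive is read via ParsedParameterFile(noHeader=True) and its entries are merged
    # into the innermost dictionary on dictStack; otherwise the directive is kept verbatim.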
712
722
723     def p_remove(self,p):
724         '''remove : REMOVE word
725 | REMOVE wlist'''
726 p[0] = p[1] + " "
727 if type(p[2])==str:
728 p[0]+=p[2]
729 else:
730 p[0]+="( "
731 for w in p[2]:
732 p[0]+=w+" "
733 p[0]+=")"
734
736 '''integer : ICONST'''
737 p[0] = int(p[1])
738
740 '''integer : FCONST'''
741 p[0] = float(p[1])
742
746
747     def p_exit_dict(self,p):
748         '''exit_dict :'''
749 p[0]=self.dictStack.pop()
750
751     def p_dictionary(self,p):
752         '''dictionary : '{' enter_dict dictbody '}' exit_dict
753 | '{' '}' '''
754 if len(p)==6:
755 p[0] = p[5]
756 else:
757 p[0] = DictProxy()
758
759     def p_dictbody(self,p):
760 '''dictbody : dictbody dictline
761 | dictline
762 | empty'''
763
764 if len(p)==3:
765 p[0]=p[1]
766 if self.duplicateCheck:
767 if p[2][0] in p[0]:
768 if self.duplicateFail:
769 error("Key",p[2][0],"already defined")
770 else:
771 warning("Key",p[2][0],"already defined")
772 if type(p[2][0])==DictRedirection and p[2][1]=='':
773 p[0].addRedirection(p[2][0])
774 else:
775 if type(p[2][1])==DictRedirection:
776 p[0][p[2][0]]=p[2][1].getContent()
777 else:
778 p[0][p[2][0]]=p[2][1]
779 p[0].addDecoration(p[2][0],self.getDecoration())
780 else:
781 p[0]=self.dictStack[-1]
782
783 if p[1]:
784 if type(p[1][0])==DictRedirection and p[1][1]=='':
785 p[0].addRedirection(p[1][0])
786 else:
787 if type(p[1][1])==DictRedirection:
788 p[0][p[1][0]]=p[1][1].getContent()
789 else:
790 p[0][p[1][0]]=p[1][1]
791
792
811
812     def p_wlist(self,p):
813         '''wlist : '(' wordlist ')' '''
814 p[0] = p[2]
815
816     def p_unparsed(self,p):
817         '''unparsed : UNPARSEDCHUNK'''
818 p[0] = Unparsed(p[1])
819
820     def p_binaryblob(self,p):
821         '''binaryblob : BINARYBLOB'''
822 p[0] = BinaryBlob(p[1])
823
824     def p_prelist_seen(self,p):
825         '''prelist_seen : '''
826 if self.binaryMode:
827 p.lexer.begin('binaryblob')
828 p.lexer.binary_start = p.lexer.lexpos
829 p.lexer.binary_listlen = p[-1]
830 self.inBinary=True
831 elif self.listLengthUnparsed!=None:
832 if int(p[-1])>=self.listLengthUnparsed:
833 p.lexer.begin('unparsed')
834 p.lexer.level=0
835 p.lexer.code_start = p.lexer.lexpos
836
837     def p_codestream(self,p):
838         '''codestream : codeSeen CODESTART CODESTREAMCHUNK CODEEND '''
839 p[0] = Codestream(p[3])
840
841     def p_codeSeen(self,p):
842         '''codeSeen : '''
843 p.lexer.begin('codestream')
844 p.lexer.level=0
845 p.lexer.code_start = p.lexer.lexpos
846
847     def p_prelist(self,p):
848         '''prelist : integer prelist_seen '(' itemlist ')'
849 | integer prelist_seen '(' binaryblob ')'
850 | integer prelist_seen '(' unparsed ')' '''
851 if type(p[4])==Unparsed:
852 p[0] = UnparsedList(int(p[1]),p[4].data)
853 elif type(p[4])==BinaryBlob:
854 p[0] = BinaryList(int(p[1]),p[4].data)
855 else:
856 p[0] = self.condenseAllPreFixLists(p[4])
857
858     def p_itemlist(self,p):
859         '''itemlist : itemlist item
860 | itemlist ';'
861 | item '''
862 if len(p)==2:
863 if p[1]==None:
864 p[0]=[]
865 else:
866 p[0]=[ p[1] ]
867 else:
868 p[0]=p[1]
869 if p[2]!=';':
870 p[0].append(p[2])
871
872     def p_wordlist(self,p):
873         '''wordlist : wordlist word
874 | word '''
875 if len(p)==2:
876 if p[1]==None:
877 p[0]=[]
878 else:
879 p[0]=[ p[1] ]
880 else:
881 p[0]=p[1]
882 p[0].append(p[2])
883
898
899     def parseSubst_root(self,nm,stck):
900         if nm[0]==":":
901 stck=[self.dictStack[0]]
902 nm=nm[1:]
903 elif nm[0]=='.':
904 nm=nm[1:]
905 off=0
906 while nm[0]=='.':
907 nm=nm[1:]
908 off+=1
909 if off>0:
910 stck=stck[:-off]
911 elif nm[0]=="{":
912 inner=nm[1:nm.rfind("}")].strip()
913 if inner[0]=="$":
914 nm=self.parseSubst_root(inner[1:],stck)()
915 else:
916 nm=inner
917 rest=None
918 if nm.find(".")>0:
919 rest=nm[nm.find(".")+1:]
920 nm=nm[:nm.find(".")]
921 for i,di in enumerate(reversed(stck)):
922 if nm in di:
923 if rest==None:
924 v=DictRedirection(deepcopy(di[nm]),
925 di[nm],
926 nm)
927 return v
928 else:
929 newStck=stck[:i]
930 newStck.append(di[nm])
931 return self.parseSubst_root(rest,newStck)
932
933     def p_substitution(self,p):
934         '''substitution : SUBSTITUTION'''
935 if self.doMacros:
936 nm=p[1][1:]
937 p[0]="<Symbol '"+nm+"' not found>"
938 stck=self.dictStack
939 p[0]=self.parseSubst_root(nm,stck)
940 else:
941 p[0]=p[1]
942
943     def p_dictkey(self,p):
944         '''dictkey : word
945 | SCONST'''
946 if type(p[1])==BoolProxy:
947 p[0]=str(p[1])
948 else:
949 p[0]=p[1]
950
951     def p_dictline(self,p):
952         '''dictline : dictkey dictitem ';'
953 | dictkey list ';'
954 | dictkey prelist ';'
955 | dictkey fieldvalue ';'
956 | macro
957 | substitution ';'
958 | dictkey codestream ';'
959 | dictkey dictionary'''
960 if len(p)==4 and self.inHeader and p[1]=="format" and type(p[2])==str:
961 if p[2]=="binary":
962 if not self.treatBinaryAsASCII:
963 self.binaryMode=True
964 else:
965 self.binaryMode=False
966 elif p[2]=="ascii":
967 self.binaryMode=False
968 else:
969             raise FatalErrorPyFoamException("Don't know how to parse file format",p[2])
970
971 if len(p)==4 and type(p[2])==list:
972
973 doAgain=True
974 tmp=p[2]
975 while doAgain:
976 doAgain=False
977 for i in range(len(tmp)-1):
978 if type(tmp[i])==int and type(tmp[i+1]) in [list]:
979 if tmp[i]==len(tmp[i+1]):
980 nix=tmp[:i]+tmp[i+1:]
981 for i in range(len(tmp)):
982 tmp.pop()
983 tmp.extend(nix)
984 doAgain=True
985 break
986 if len(p)==4:
987 p[0] = ( p[1] , p[2] )
988 elif len(p)==3:
989 if p[2]==';':
990 p[0]= (p[1],'')
991 else:
992 p[0] = ( p[1] , p[2] )
993 else:
994 p[0] = ( self.emptyCnt , p[1] )
995 self.emptyCnt+=1
996
997     def p_number(self,p):
998         '''number : integer
999 | FCONST'''
1000 p[0] = p[1]
1001
1002     def p_dimension(self,p):
1003         '''dimension : '[' number number number number number number number ']'
1004 | '[' number number number number number ']' '''
1005 result=p[2:-1]
1006 if len(result)==5:
1007 result+=[0,0]
1008
1009 p[0]=Dimension(*result)
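    # Example (derived from p_dimension above): "[0 2 -1 0 0 0 0]" becomes
    # Dimension(0, 2, -1, 0, 0, 0, 0); the short five-entry form is padded with two
    # trailing zeros before the Dimension is built.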
1010
1011     def p_vector(self,p):
1012         '''vector : '(' number number number ')' '''
1013 if self.noVectorOrTensor:
1014 p[0]=p[2:5]
1015 else:
1016 p[0]=Vector(*p[2:5])
1017
1018     def p_tensor(self,p):
1019         '''tensor : '(' number number number number number number number number number ')' '''
1020 if self.noVectorOrTensor:
1021 p[0]=p[2:11]
1022 else:
1023 p[0]=Tensor(*p[2:11])
1024
1025     def p_symmtensor(self,p):
1026         '''symmtensor : '(' number number number number number number ')' '''
1027 if self.noVectorOrTensor:
1028 p[0]=p[2:8]
1029 else:
1030 p[0]=SymmTensor(*p[2:8])
1031
1038
1047
1048     def p_dictitem(self,p):
1049         '''dictitem : longitem
1050 | pitem'''
1051 if type(p[1])==tuple:
1052 if len(p[1])==2 and p[1][0]=="uniform":
1053 p[0]=Field(p[1][1])
1054 elif len(p[1])==3 and p[1][0]=="nonuniform":
1055 p[0]=Field(p[1][2],name=p[1][1])
1056 else:
1057 p[0]=TupleProxy(p[1])
1058 else:
1059 p[0] = p[1]
1060
1061     def p_longitem(self,p):
1062         '''longitem : pitemlist pitem'''
1063 p[0] = p[1]+(p[2],)
1064
1065     def p_pitemlist(self,p):
1066         '''pitemlist : pitemlist pitem
1067 | pitem '''
1068 if len(p)==2:
1069 p[0]=(p[1],)
1070 else:
1071
1072
1073
1074 p[0]=p[1]+(p[2],)
1075
1076     def p_pitem(self,p):
1077         '''pitem : word
1078 | SCONST
1079 | number
1080 | dictionary
1081 | list
1082 | dimension
1083 | substitution
1084 | empty'''
1085 p[0] = p[1]
1086
1087     def p_item(self,p):
1088         '''item : pitem
1089 | REACTION
1090 | list
1091 | dictionary'''
1092 p[0] = p[1]
1093
1097
1103
1104
1105
1111
1113 result="Error in PyFoamParser: '"+self.descr+"'"
1114 if self.data!=None:
1115 val=self.data.value
1116 if len(val)>100:
1117 val=val[:40]+" .... "+val[-40:]
1118
1119 result+=" @ %r (Type: %s ) in line %d at position %d" % (val,
1120 self.data.type,
1121 self.data.lineno,
1122 self.data.lexpos)
1123 else:
1124 result+=" NONE"
1125
1126 return result
1127
1130
1134
1135 class FoamStringParser(FoamFileParser):
1136     """Convenience class that parses only a headerless OpenFOAM dictionary"""
1137
1138     def __init__(self,
1139 content,
1140 debug=False,
1141 noVectorOrTensor=False,
1142 duplicateCheck=False,
1143 listDict=False,
1144 doMacroExpansion=False,
1145 duplicateFail=False):
1146 """@param content: the string to be parsed
1147 @param debug: output debug information during parsing"""
1148
1149 FoamFileParser.__init__(self,
1150 content,
1151 debug=debug,
1152 noHeader=not listDict,
1153 boundaryDict=False,
1154 listDict=listDict,
1155 noVectorOrTensor=noVectorOrTensor,
1156 duplicateCheck=duplicateCheck,
1157 doMacroExpansion=doMacroExpansion,
1158 duplicateFail=duplicateFail)
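    # Usage sketch (illustrative assumption; the dictionary string is hypothetical):
    #
    #   from PyFoam.RunDictionary.ParsedParameterFile import FoamStringParser
    #
    #   p = FoamStringParser("a 1; b { c 2; }")
    #   p["a"]         # -> 1
    #   p["b"]["c"]    # -> 2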
1159
1162
1163 class ParsedBoundaryDict(ParsedParameterFile):
1164     """Convenience class that parses only an OpenFOAM polyMesh-boundaries file"""
1165
1166     def __init__(self,
1167 name,
1168 treatBinaryAsASCII=False,
1169 backup=False,
1170 debug=False):
1171 """@param name: The name of the parameter file
1172 @param backup: create a backup-copy of the file"""
1173
1174 ParsedParameterFile.__init__(self,
1175 name,
1176 backup=backup,
1177 treatBinaryAsASCII=treatBinaryAsASCII,
1178 debug=debug,
1179 boundaryDict=True)
1180
1181     def parse(self,content):
1182 """Constructs a representation of the file"""
1183 temp=ParsedParameterFile.parse(self,content)
1184 self.content=DictProxy()
1185 for i in range(0,len(temp),2):
1186 self.content[temp[i]]=temp[i+1]
1187 return self.content
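    # Usage sketch (illustrative assumption; the case path and patch name are hypothetical):
    #
    #   from PyFoam.RunDictionary.ParsedParameterFile import ParsedBoundaryDict
    #
    #   bnd = ParsedBoundaryDict("constant/polyMesh/boundary")
    #   bnd["inlet"]["type"]     # patches are accessible by name after parse()
    #   bnd.writeFile()          # written back in the original list representation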
1188
1189     def __str__(self):
1190         string="// File generated by PyFoam - sorry for the ugliness\n\n"
1191 temp=[]
1192 for k,v in iteritems(self.content):
1193 temp.append((k,v))
1194
1195 temp.sort(key=lambda x:int(x[1]["startFace"]))
1196
1197 temp2=[]
1198
1199 for b in temp:
1200 temp2.append(b[0])
1201 temp2.append(b[1])
1202
1203 generator=FoamFileGenerator(temp2,header=self.header)
1204 string+=str(generator)
1205
1206 return string
1207
1208 class ParsedFileHeader(ParsedParameterFile):
1209     """Only parse the header of a file"""
1210
1213
1214     def __getitem__(self,name):
1215         return self.header[name]
1216
1217     def __contains__(self,name):
1218         return name in self.header
1219
1220     def __len__(self):
1221         return len(self.header)
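    # Usage sketch (illustrative assumption; the field file is hypothetical):
    #
    #   from PyFoam.RunDictionary.ParsedParameterFile import ParsedFileHeader
    #
    #   hdr = ParsedFileHeader("0/U")
    #   hdr["class"]         # e.g. "volVectorField"
    #   "object" in hdr      # -> True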
1222
1223
1224