"""Parameter file is read into memory and modified there"""

from PyFoam.RunDictionary.FileBasis import FileBasisBackup
from PyFoam.Basics.PlyParser import PlyParser
from PyFoam.Basics.FoamFileGenerator import FoamFileGenerator

from PyFoam.Basics.DataStructures import Vector,Field,Dimension,DictProxy,TupleProxy,Tensor,SymmTensor,Unparsed,UnparsedList,Codestream,DictRedirection,BinaryBlob,BinaryList

from PyFoam.Error import error,warning,FatalErrorPyFoamException

from os import path
from copy import deepcopy
import sys

from PyFoam.ThirdParty.six import print_,integer_types,iteritems


19 """ Parameterfile whose complete representation is read into
20 memory, can be manipulated and afterwards written to disk"""

    def __init__(self,
                 name,
                 backup=False,
                 debug=False,
                 boundaryDict=False,
                 listDict=False,
                 listDictWithHeader=False,
                 listLengthUnparsed=None,
                 preserveComments=True,
                 noHeader=False,
                 binaryMode=False,
                 treatBinaryAsASCII=False,
                 noBody=False,
                 doMacroExpansion=False,
                 dontRead=False,
                 noVectorOrTensor=False,
                 dictStack=None,
                 createZipped=True,
                 longListOutputThreshold=20):
41 """@param name: The name of the parameter file
42 @param backup: create a backup-copy of the file
43 @param boundaryDict: the file to parse is a boundary file
44 @param listDict: the file only contains a list
45 @param listDictWithHeader: the file only contains a list and a header
46 @param listLengthUnparsed: Lists longer than that length are not parsed
47 @param binaryMode: Parse long lists in binary mode (to be overridden by
48 the settings in the header).
49 @param treatBinaryAsASCII: even if the header says that this is a
50 binary file treat it like an ASCII-file
51 @param noHeader: don't expect a header
52 @param noBody: don't read the body of the file (only the header)
53 @param doMacroExpansion: expand #include and $var
54 @param noVectorOrTensor: short lists of length 3, 6 an 9 are NOT
55 interpreted as vectors or tensors
56 @param dontRead: Do not read the file during construction
57 @param longListOutputThreshold: Lists that are longer than this are
58 prefixed with a length
59 @param dictStack: dictionary stack for lookup (only used for include)
60 """

        self.noHeader=noHeader
        self.noBody=noBody
        FileBasisBackup.__init__(self,
                                 name,
                                 backup=backup,
                                 createZipped=createZipped)
        self.debug=debug
        self.boundaryDict=boundaryDict
        self.listDict=listDict
        self.listDictWithHeader=listDictWithHeader
        self.listLengthUnparsed=listLengthUnparsed
        self.doMacros=doMacroExpansion
        self.preserveComments=preserveComments
        self.noVectorOrTensor=noVectorOrTensor
        self.header=None
        self.content=None
        self.longListOutputThreshold=longListOutputThreshold
        self.binaryMode=binaryMode
        self.treatBinaryAsASCII=treatBinaryAsASCII
        self.lastDecoration=""
        self.dictStack=dictStack

        if not dontRead:
            self.readFile()

    def parse(self,content):
        """Constructs a representation of the file"""
        try:
            parser=FoamFileParser(content,
                                  debug=self.debug,
                                  fName=self.name,
                                  boundaryDict=self.boundaryDict,
                                  listDict=self.listDict,
                                  listDictWithHeader=self.listDictWithHeader,
                                  listLengthUnparsed=self.listLengthUnparsed,
                                  noHeader=self.noHeader,
                                  noBody=self.noBody,
                                  preserveComments=self.preserveComments,
                                  binaryMode=self.binaryMode,
                                  treatBinaryAsASCII=self.treatBinaryAsASCII,
                                  noVectorOrTensor=self.noVectorOrTensor,
                                  dictStack=self.dictStack,
                                  doMacroExpansion=self.doMacros)
        except BinaryParserError:
            e = sys.exc_info()[1]
            if not self.treatBinaryAsASCII:
                # retry and treat the binary content as if it were ASCII
                parser=FoamFileParser(content,
                                      debug=self.debug,
                                      fName=self.name,
                                      boundaryDict=self.boundaryDict,
                                      listDict=self.listDict,
                                      listDictWithHeader=self.listDictWithHeader,
                                      listLengthUnparsed=self.listLengthUnparsed,
                                      noHeader=self.noHeader,
                                      noBody=self.noBody,
                                      preserveComments=self.preserveComments,
                                      binaryMode=self.binaryMode,
                                      treatBinaryAsASCII=True,
                                      noVectorOrTensor=self.noVectorOrTensor,
                                      dictStack=self.dictStack,
                                      doMacroExpansion=self.doMacros)
            else:
                raise e

        self.content=parser.getData()
        self.header=parser.getHeader()
        self.lastDecoration=parser._decorationBuffer

        return self.content

    def __contains__(self,key):
        return key in self.content

    def __getitem__(self,key):
        return self.content[key]

    def __setitem__(self,key,value):
        self.content[key]=value

    def __delitem__(self,key):
        del self.content[key]

    def __len__(self):
        return len(self.content)

    def __iter__(self):
        for key in self.content:
            yield key
153 """Generates a string from the contents in memory
154 Used to be called makeString"""
155
156 string="// -*- C++ -*-\n// File generated by PyFoam - sorry for the ugliness\n\n"
157
158 generator=FoamFileGenerator(self.content,
159 header=self.header,
160 longListThreshold=self.longListOutputThreshold)
161 string+=generator.makeString(firstLevel=True)
162
163 if len(self.lastDecoration)>0:
164 string+="\n\n"+self.lastDecoration
165
166 return string

    def getValueDict(self):
        """Get a dictionary with the values with the decorators removed"""
        result={}
        if self.content:
            for k in self.content:
                if type(k) not in integer_types:
                    result[k]=self.content[k]
        return result


class WriteParameterFile(ParsedParameterFile):
    """A specialization that is used to only write to the file"""

    def __init__(self,
                 name,
                 backup=False,
                 className="dictionary",
                 objectName=None,
                 createZipped=False):
        ParsedParameterFile.__init__(self,
                                     name,
                                     backup=backup,
                                     dontRead=True,
                                     createZipped=createZipped)

        if objectName==None:
            objectName=path.basename(name)

        self.content=DictProxy()
        self.header={"version":"2.0",
                     "format":"ascii",
                     "class":className,
                     "object":objectName}

# Simple enumeration helper: each name becomes an attribute holding its index
class Enumerate(object):
    def __init__(self, names):
        for number, name in enumerate(names):
            setattr(self, name, number)

inputModes=Enumerate(["merge","error","warn","protect","overwrite","default"])


class FoamFileParser(PlyParser):
    """Class that parses a string that contains the contents of an
    OpenFOAM-file and builds a nested structure of dictionaries and
    lists from it"""
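
    # Illustrative sketch (the dictionary content is made up): parsing a
    # string without a FoamFile header and retrieving the resulting structure.
    #
    #   p = FoamFileParser("a 1;\nb (1 2 3);\n", noHeader=True)
    #   p.getData()    # nested dict-like structure with the keys 'a' and 'b'
    #   p.getHeader()  # None, because no header was parsed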

    def __init__(self,
                 content,
                 fName=None,
                 debug=False,
                 noHeader=False,
                 noBody=False,
                 doMacroExpansion=False,
                 boundaryDict=False,
                 preserveComments=True,
                 preserveNewlines=True,
                 listDict=False,
                 listDictWithHeader=False,
                 listLengthUnparsed=None,
                 binaryMode=False,
                 treatBinaryAsASCII=False,
                 duplicateCheck=False,
                 noVectorOrTensor=False,
                 dictStack=None,
                 duplicateFail=True):
        """@param content: the string to be parsed
        @param fName: Name of the actual file (if any)
        @param debug: output debug information during parsing
        @param noHeader: switch that turns off the parsing of the header
        @param duplicateCheck: Check for duplicates in dictionaries
        @param duplicateFail: Fail if a duplicate is discovered"""

        self.binaryMode=binaryMode
        self.treatBinaryAsASCII=treatBinaryAsASCII
        self.fName=fName
        self.data=None
        self.header=None
        self.debug=debug
        self.listLengthUnparsed=listLengthUnparsed
        self.doMacros=doMacroExpansion
        self.preserveComments=preserveComments
        self.preserveNewLines=preserveNewlines
        self.duplicateCheck=duplicateCheck
        self.duplicateFail=duplicateFail
        self.noVectorOrTensor=noVectorOrTensor
        self.inHeader=True
        self.inBinary=False

        self.collectDecorations=False
        self.inputMode=inputModes.merge

        self._decorationBuffer=""

        startCnt=0

        self.dictStack=dictStack
        if self.dictStack==None:
            self.dictStack=[DictProxy()]

        if noBody:
            self.start='noBody'
            startCnt+=1

        if noHeader:
            self.inHeader=False
            self.start='noHeader'
            startCnt+=1
            self.collectDecorations=True

        if listDict:
            self.inHeader=False
            self.start='pureList'
            startCnt+=1
            self.dictStack=[]
            self.collectDecorations=True

        if listDictWithHeader:
            self.start='pureListWithHeader'
            startCnt+=1

        if boundaryDict:
            self.start='boundaryDict'
            startCnt+=1

        if startCnt>1:
            error("Only one start symbol can be specified.",startCnt,"are specified")

        PlyParser.__init__(self,debug=debug)

        self.emptyCnt=0

        self.header,self.data=self.parse(content)
302
304 return key in self.data
305
307 return self.data[key]
308
311
314
316 for key in self.data:
317 yield key
318
319
320
321
322
323
324
    def resetDecoration(self):
        self._decorationBuffer=""

    def addToDecoration(self,text):
        if self.collectDecorations:
            self._decorationBuffer+=text

    def getDecoration(self):
        tmp=self._decorationBuffer
        self.resetDecoration()
        if len(tmp)>0:
            if tmp[-1]=='\n':
                tmp=tmp[:-1]
        return tmp

    def directory(self):
        """Directory of the parsed file (used to resolve #include)"""
        if self.fName==None:
            return path.curdir
        else:
            return path.dirname(self.fName)

    def getData(self):
        """Get the data structure"""
        return self.data

    def getHeader(self):
        """Get the OpenFOAM-header"""
        return self.header

    def printContext(self,c,ind):
        """Prints the context of the current index"""
        print_("------")
        print_(c[max(0,ind-100):max(0,ind-1)])
        print_("------")
        print_(">",c[ind-1],"<")
        print_("------")
        print_(c[min(len(c),ind):min(len(c),ind+100)])
        print_("------")

380 """Checks whether this list is a list that consists only of prefix-Lists"""
381 isAllPreList=False
382 if (len(orig) % 2)==0:
383 isAllPreList=True
384 for i in range(0,len(orig),2):
385 if type(orig[i])==int and (type(orig[i+1]) in [list,Vector,Tensor,SymmTensor]):
386 if len(orig[i+1])!=orig[i]:
387 isAllPreList=False
388 break
389 else:
390 isAllPreList=False
391 break
392
393 if isAllPreList:
394 return orig[1::2]
395 else:
396 return orig
397
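    # Worked example of the condensing above (made-up numbers): the list
    #   [2, [1.0, 2.0], 2, [3.0, 4.0]]
    # consists only of length/list pairs, so it is condensed to
    #   [[1.0, 2.0], [3.0, 4.0]]
    # while any list that does not follow this pattern is returned unchanged.
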
    tokens = (
        'NAME',
        'ICONST',
        'FCONST',
        'SCONST',
        'FOAMFILE',
        'UNIFORM',
        'NONUNIFORM',
        'UNPARSEDCHUNK',
        'CODESTREAMCHUNK',
        'REACTION',
        'SUBSTITUTION',
        'MERGE',
        'OVERWRITE',
        'ERROR',
        'WARN',
        'PROTECT',
        'DEFAULT',
        'INCLUDE',
        'INCLUDEIFPRESENT',
        'REMOVE',
        'INPUTMODE',
        'KANALGITTER',
        'CODESTART',
        'CODEEND',
        'BINARYBLOB',
    )

    reserved = {
        'FoamFile'        : 'FOAMFILE',
        'uniform'         : 'UNIFORM',
        'nonuniform'      : 'NONUNIFORM',
        'include'         : 'INCLUDE',
        'includeIfPresent': 'INCLUDEIFPRESENT',
        'remove'          : 'REMOVE',
        'inputMode'       : 'INPUTMODE',
        'merge'           : 'MERGE',
        'overwrite'       : 'OVERWRITE',
        'error'           : 'ERROR',
        'warn'            : 'WARN',
        'protect'         : 'PROTECT',
        'default'         : 'DEFAULT',
    }

    states = (
        ('unparsed', 'exclusive'),
        ('codestream', 'exclusive'),
        ('mlcomment', 'exclusive'),
        ('binaryblob', 'exclusive'),
    )

    def t_unparsed_open(self,t):
        r'\('
        t.lexer.level+=1

    def t_unparsed_close(self,t):
        r'\)'
        t.lexer.level-=1
        # a level below zero means this ')' closes the enclosing list
        if t.lexer.level < 0 :
            t.value = t.lexer.lexdata[t.lexer.code_start:t.lexer.lexpos-1]
            t.lexer.lexpos-=1
            t.type = "UNPARSEDCHUNK"
            t.lexer.lineno += t.value.count('\n')
            t.lexer.begin('INITIAL')
            return t

    t_unparsed_ignore = ' \t\n0123456789.-+e'

    t_binaryblob_ignore = ''

    def t_binaryblob_close(self,t):
        r"\)"
        size=t.lexer.lexpos-t.lexer.binary_start-1

        if (size % t.lexer.binary_listlen)==0:
            # only treat this as the closing paren if it is followed by ';' or '\n' plus a newline
            nextChar=t.lexer.lexdata[t.lexer.lexpos]
            nextNextChar=t.lexer.lexdata[t.lexer.lexpos+1]
            if (nextChar in [';','\n'] and nextNextChar=='\n'):
                t.value = t.lexer.lexdata[t.lexer.binary_start:t.lexer.lexpos-1]
                assert(len(t.value)%t.lexer.binary_listlen == 0)
                t.lexer.lexpos-=1
                t.type = "BINARYBLOB"
                t.lexer.lineno += t.value.count('\n')
                t.lexer.begin('INITIAL')
                self.inBinary=False
                return t

    def t_codestream_end(self,t):
        r"\#\}"
        t.value = t.lexer.lexdata[t.lexer.code_start:t.lexer.lexpos-2]
        t.lexer.lexpos-=2
        t.type = "CODESTREAMCHUNK"
        t.lexer.lineno += t.value.count('\n')
        t.lexer.begin('INITIAL')
        return t

    t_codestream_ignore = ''

    def t_SUBSTITUTION(self,t):
        r'\$[a-zA-Z_][+\-<>(),.\*|a-zA-Z_0-9&%:]*'
        t.type=self.reserved.get(t.value,'SUBSTITUTION')
        if t.value[-1]==")":
            if t.value.count(")")>t.value.count("("):
                # the trailing ')' does not belong to the substitution
                t.value=t.value[:-1]
                t.lexer.lexpos-=1

        return t

    t_CODESTART = r'\#\{'

    t_CODEEND = r'\#\}'

    t_KANALGITTER = r'\#'

    t_ICONST = r'(-|)\d+([uU]|[lL]|[uU][lL]|[lL][uU])?'

    t_FCONST = r'(-|)((\d+)(\.\d*)(e(\+|-)?(\d+))? | (\d+)e(\+|-)?(\d+))([lL]|[fF])?'

    t_SCONST = r'\"([^\\\n]|(\\.))*?\"'

    literals = "(){};[]"

    t_ignore=" \t\r"

    def t_newline(self,t):
        r'\n+'
        t.lexer.lineno += len(t.value)
        now=t.lexer.lexpos
        next=t.lexer.lexdata.find('\n',now)
        if next>=0:
            line=t.lexer.lexdata[now:next]
            pos=line.find("=")
            # a '=' that is not inside a comment or string marks a reaction line
            if pos>=0 and not self.binaryMode:
                if ((line.find("//")>=0 and line.find("//")<pos)) or (line.find("/*")>=0 and line.find("/*")<pos) or (line.find('"')>=0 and line.find('"')<pos):
                    return
                t.value = line
                t.type = "REACTION"
                t.lexer.lineno += 1
                t.lexer.lexpos = next
                return t

    t_mlcomment_ignore = ''

    def p_global(self,p):
        'global : header dictbody'
        p[0] = ( p[1] , p[2] )

    def p_gotHeader(self,p):
        'gotHeader :'
        p.lexer.lexpos=len(p.lexer.lexdata)
        self.inHeader=False

    def p_noBody(self,p):
        ''' noBody : FOAMFILE '{' dictbody gotHeader '}' '''
        p[0] = ( p[3] , {} )

    def p_noHeader(self,p):
        'noHeader : dictbody'
        p[0] = ( None , p[1] )

    def p_pureList(self,p):
        'pureList : list'
        p[0] = ( None , p[1] )

    def p_onlyListOrPList(self,p):
        '''onlyListOrPList : list
                           | prelist '''
        p[0]=p[1]

    def p_pureListWithHeader(self,p):
        '''pureListWithHeader : header onlyListOrPList'''
        p[0] = ( p[1] , p[2] )

    def p_afterHeader(self,p):
        'afterHeader :'
        pass

    def p_boundaryDict(self,p):
        '''boundaryDict : header list
                        | header prelist '''

        p[0] = ( p[1] , p[2] )

    def p_header(self,p):
        'header : FOAMFILE dictionary'
        self.inHeader=False
        p[0] = p[2]

        self.collectDecorations=True

    def p_macro(self,p):
        '''macro : KANALGITTER include
                 | KANALGITTER inputMode
                 | KANALGITTER remove'''
        p[0] = p[1]+p[2]+"\n"
        if self.doMacros:
            p[0]="// "+p[0]

    def p_include(self,p):
        '''include : INCLUDE SCONST
                   | INCLUDEIFPRESENT SCONST'''
        if self.doMacros:
            fName=path.join(self.directory(),p[2][1:-1])
            read=True
            if p[1]=="includeIfPresent" and not path.exists(fName):
                read=False
            if read and not path.exists(fName):
                raise PyFoamParserError("The included file "+fName+" does not exist")
            if read:
                data=ParsedParameterFile(fName,
                                         noHeader=True,
                                         dictStack=self.dictStack,
                                         doMacroExpansion=self.doMacros)
                into=self.dictStack[-1]
                for k in data:
                    into[k]=data[k]

        p[0] = p[1] + " " + p[2]

    def p_remove(self,p):
        '''remove : REMOVE word
                  | REMOVE wlist'''
        p[0] = p[1] + " "
        if type(p[2])==str:
            p[0]+=p[2]
        else:
            p[0]+="( "
            for w in p[2]:
                p[0]+=w+" "
            p[0]+=")"

    def p_integer(self,p):
        '''integer : ICONST'''
        p[0] = int(p[1])

    def p_integer_float(self,p):
        '''integer : FCONST'''
        p[0] = float(p[1])

    def p_enter_dict(self,p):
        '''enter_dict :'''
        self.dictStack.append(DictProxy())

    def p_exit_dict(self,p):
        '''exit_dict :'''
        p[0]=self.dictStack.pop()

    def p_dictionary(self,p):
        '''dictionary : '{' enter_dict dictbody '}' exit_dict
                      | '{' '}' '''
        if len(p)==6:
            p[0] = p[5]
        else:
            p[0] = DictProxy()

    def p_dictbody(self,p):
        '''dictbody : dictbody dictline
                    | dictline
                    | empty'''

        if len(p)==3:
            p[0]=p[1]
            if self.duplicateCheck:
                if p[2][0] in p[0]:
                    if self.duplicateFail:
                        error("Key",p[2][0],"already defined")
                    else:
                        warning("Key",p[2][0],"already defined")
            if type(p[2][0])==DictRedirection and p[2][1]=='':
                p[0].addRedirection(p[2][0])
            else:
                if type(p[2][1])==DictRedirection:
                    p[0][p[2][0]]=p[2][1].getContent()
                else:
                    p[0][p[2][0]]=p[2][1]
                p[0].addDecoration(p[2][0],self.getDecoration())
        else:
            p[0]=self.dictStack[-1]

            if p[1]:
                if type(p[1][0])==DictRedirection and p[1][1]=='':
                    p[0].addRedirection(p[1][0])
                else:
                    if type(p[1][1])==DictRedirection:
                        p[0][p[1][0]]=p[1][1].getContent()
                    else:
                        p[0][p[1][0]]=p[1][1]

    def p_wlist(self,p):
        '''wlist : '(' wordlist ')' '''
        p[0] = p[2]

    def p_unparsed(self,p):
        '''unparsed : UNPARSEDCHUNK'''
        p[0] = Unparsed(p[1])

    def p_binaryblob(self,p):
        '''binaryblob : BINARYBLOB'''
        p[0] = BinaryBlob(p[1])

    def p_prelist_seen(self,p):
        '''prelist_seen : '''
        if self.binaryMode:
            p.lexer.begin('binaryblob')
            p.lexer.binary_start = p.lexer.lexpos
            p.lexer.binary_listlen = p[-1]
            self.inBinary=True
        elif self.listLengthUnparsed!=None:
            if int(p[-1])>=self.listLengthUnparsed:
                p.lexer.begin('unparsed')
                p.lexer.level=0
                p.lexer.code_start = p.lexer.lexpos

    def p_codestream(self,p):
        '''codestream : codeSeen CODESTART CODESTREAMCHUNK CODEEND '''
        p[0] = Codestream(p[3])

    def p_codeSeen(self,p):
        '''codeSeen : '''
        p.lexer.begin('codestream')
        p.lexer.level=0
        p.lexer.code_start = p.lexer.lexpos

    def p_prelist(self,p):
        '''prelist : integer prelist_seen '(' itemlist ')'
                   | integer prelist_seen '(' binaryblob ')'
                   | integer prelist_seen '(' unparsed ')' '''
        if type(p[4])==Unparsed:
            p[0] = UnparsedList(int(p[1]),p[4].data)
        elif type(p[4])==BinaryBlob:
            p[0] = BinaryList(int(p[1]),p[4].data)
        else:
            p[0] = self.condenseAllPreFixLists(p[4])

    def p_itemlist(self,p):
        '''itemlist : itemlist item
                    | itemlist ';'
                    | item '''
        if len(p)==2:
            if p[1]==None:
                p[0]=[]
            else:
                p[0]=[ p[1] ]
        else:
            p[0]=p[1]
            if p[2]!=';':
                p[0].append(p[2])

    def p_wordlist(self,p):
        '''wordlist : wordlist word
                    | word '''
        if len(p)==2:
            if p[1]==None:
                p[0]=[]
            else:
                p[0]=[ p[1] ]
        else:
            p[0]=p[1]
            p[0].append(p[2])

    def p_word(self,p):
        '''word : NAME
                | UNIFORM
                | NONUNIFORM
                | MERGE
                | OVERWRITE
                | DEFAULT
                | WARN
                | PROTECT
                | ERROR'''
        p[0]=p[1]

    def p_substitution(self,p):
        '''substitution : SUBSTITUTION'''
        if self.doMacros:
            nm=p[1][1:]
            p[0]="<Symbol '"+nm+"' not found>"
            for di in reversed(self.dictStack):
                if nm in di:
                    p[0]=DictRedirection(deepcopy(di[nm]),
                                         di[nm],
                                         nm)
                    return
        else:
            p[0]=p[1]

    def p_dictkey(self,p):
        '''dictkey : word
                   | SCONST'''
        p[0]=p[1]

    def p_dictline(self,p):
        '''dictline : dictkey dictitem ';'
                    | dictkey list ';'
                    | dictkey prelist ';'
                    | dictkey fieldvalue ';'
                    | macro
                    | substitution ';'
                    | dictkey codestream ';'
                    | dictkey dictionary'''
        if len(p)==4 and self.inHeader and p[1]=="format" and type(p[2])==str:
            if p[2]=="binary":
                if not self.treatBinaryAsASCII:
                    self.binaryMode=True
                else:
                    self.binaryMode=False
            elif p[2]=="ascii":
                self.binaryMode=False
            else:
                raise FatalErrorPyFoamException("Don't know how to parse file format",p[2])

        if len(p)==4 and type(p[2])==list:
            # remove redundant length prefixes in front of sub-lists of exactly that length
            doAgain=True
            tmp=p[2]
            while doAgain:
                doAgain=False
                for i in range(len(tmp)-1):
                    if type(tmp[i])==int and type(tmp[i+1]) in [list]:
                        if tmp[i]==len(tmp[i+1]):
                            nix=tmp[:i]+tmp[i+1:]
                            for i in range(len(tmp)):
                                tmp.pop()
                            tmp.extend(nix)
                            doAgain=True
                            break
        if len(p)==4:
            p[0] = ( p[1] , p[2] )
        elif len(p)==3:
            if p[2]==';':
                p[0]= (p[1],'')
            else:
                p[0] = ( p[1] , p[2] )
        else:
            p[0] = ( self.emptyCnt , p[1] )
            self.emptyCnt+=1

    def p_number(self,p):
        '''number : integer
                  | FCONST'''
        p[0] = p[1]

    def p_dimension(self,p):
        '''dimension : '[' number number number number number number number ']'
                     | '[' number number number number number ']' '''
        result=p[2:-1]
        if len(result)==5:
            result+=[0,0]

        p[0]=Dimension(*result)

    def p_vector(self,p):
        '''vector : '(' number number number ')' '''
        if self.noVectorOrTensor:
            p[0]=p[2:5]
        else:
            p[0]=Vector(*p[2:5])

    def p_tensor(self,p):
        '''tensor : '(' number number number number number number number number number ')' '''
        if self.noVectorOrTensor:
            p[0]=p[2:11]
        else:
            p[0]=Tensor(*p[2:11])

    def p_symmtensor(self,p):
        '''symmtensor : '(' number number number number number number ')' '''
        if self.noVectorOrTensor:
            p[0]=p[2:8]
        else:
            p[0]=SymmTensor(*p[2:8])

    def p_dictitem(self,p):
        '''dictitem : longitem
                    | pitem'''
        if type(p[1])==tuple:
            if len(p[1])==2 and p[1][0]=="uniform":
                p[0]=Field(p[1][1])
            elif len(p[1])==3 and p[1][0]=="nonuniform":
                p[0]=Field(p[1][2],name=p[1][1])
            else:
                p[0]=TupleProxy(p[1])
        else:
            p[0] = p[1]

    def p_longitem(self,p):
        '''longitem : pitemlist pitem'''
        p[0] = p[1]+(p[2],)

    def p_pitemlist(self,p):
        '''pitemlist : pitemlist pitem
                     | pitem '''
        if len(p)==2:
            p[0]=(p[1],)
        else:
            p[0]=p[1]+(p[2],)

    def p_pitem(self,p):
        '''pitem : word
                 | SCONST
                 | number
                 | dictionary
                 | list
                 | dimension
                 | substitution
                 | empty'''
        p[0] = p[1]

    def p_item(self,p):
        '''item : pitem
                | REACTION
                | list
                | dictionary'''
        p[0] = p[1]


class PyFoamParserError(FatalErrorPyFoamException):
    def __str__(self):
        result="Error in PyFoamParser: '"+self.descr+"'"
        if self.data!=None:
            val=self.data.value
            if len(val)>100:
                val=val[:40]+" .... "+val[-40:]

            result+=" @ %r (Type: %s ) in line %d at position %d" % (val,
                                                                     self.data.type,
                                                                     self.data.lineno,
                                                                     self.data.lexpos)
        else:
            result+=" NONE"

        return result


class BinaryParserError(PyFoamParserError):
    pass

1098 """Convenience class that parses only a headerless OpenFOAM dictionary"""
1099
1100 - def __init__(self,
1101 content,
1102 debug=False,
1103 noVectorOrTensor=False,
1104 duplicateCheck=False,
1105 listDict=False,
1106 doMacroExpansion=False,
1107 duplicateFail=False):
1108 """@param content: the string to be parsed
1109 @param debug: output debug information during parsing"""
1110
1111 FoamFileParser.__init__(self,
1112 content,
1113 debug=debug,
1114 noHeader=not listDict,
1115 boundaryDict=False,
1116 listDict=listDict,
1117 noVectorOrTensor=noVectorOrTensor,
1118 duplicateCheck=duplicateCheck,
1119 doMacroExpansion=doMacroExpansion,
1120 duplicateFail=duplicateFail)
1121
1124
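
# Illustrative usage sketch (the dictionary string is made up):
#
#   parsed = FoamStringParser("a 1;\nb { c 2; }\n")
#   parsed["a"]          # -> 1
#   parsed["b"]["c"]     # -> 2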
1126 """Convenience class that parses only a OpenFOAM polyMesh-boundaries file"""
1127
1128 - def __init__(self,
1129 name,
1130 treatBinaryAsASCII=False,
1131 backup=False,
1132 debug=False):
1133 """@param name: The name of the parameter file
1134 @param backup: create a backup-copy of the file"""
1135
1136 ParsedParameterFile.__init__(self,
1137 name,
1138 backup=backup,
1139 treatBinaryAsASCII=treatBinaryAsASCII,
1140 debug=debug,
1141 boundaryDict=True)

    def parse(self,content):
        """Constructs a representation of the file"""
        temp=ParsedParameterFile.parse(self,content)
        self.content=DictProxy()
        for i in range(0,len(temp),2):
            self.content[temp[i]]=temp[i+1]
        return self.content

    def __str__(self):
        string="// File generated by PyFoam - sorry for the ugliness\n\n"
        temp=[]
        for k,v in iteritems(self.content):
            temp.append((k,v))

        # sort the patches by their startFace
        temp.sort(key=lambda x:int(x[1]["startFace"]))

        temp2=[]

        for b in temp:
            temp2.append(b[0])
            temp2.append(b[1])

        generator=FoamFileGenerator(temp2,header=self.header)
        string+=str(generator)

        return string
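
# Illustrative sketch (assumes an existing polyMesh/boundary file; the patch
# name 'inlet' is made up):
#
#   bnd = ParsedBoundaryDict("constant/polyMesh/boundary")
#   bnd["inlet"]["type"]      # patch type of the patch named 'inlet'
#   bnd["inlet"]["nFaces"]    # number of faces in that patch
#   bnd.writeFile()           # write the (possibly modified) boundary file back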

class ParsedFileHeader(ParsedParameterFile):
    """Only parse the header of a file"""

    def __getitem__(self,name):
        return self.header[name]

    def __contains__(self,name):
        return name in self.header

    def __len__(self):
        return len(self.header)