1
2 """Things that are needed for convenient parallel Execution"""
3
4 from PyFoam.Basics.Utilities import Utilities
5 from PyFoam.FoamInformation import foamMPI,oldAppConvention
6 from PyFoam.Error import error,warning,debug
7 from PyFoam import configuration as config
8
9 from os import path,environ,system
10 from string import strip
11 import commands
12
class LAMMachine(Utilities):
    """Wrapper class for starting and stopping a LAM-Machine.

    For LAM a machine-file is mandatory and the machine is actually
    booted/halted via lamboot/lamhalt; for OpenMPI the object only
    records the requested machine-file and process count."""

    # NOTE(review): class header and method 'def' lines were reconstructed
    # from the mangled source (names grounded by the self.* calls below)

    def __init__(self, machines=None, nr=None):
        """@param machines: Name of the file with the machine information
        @param nr: Number of processes"""

        Utilities.__init__(self)

        # make sure that no previously started machine is still running
        self.stop()

        # an empty file name counts as 'no machine-file given'
        if machines == "":
            machines = None

        if machines is None and foamMPI() == "LAM":
            error("Machinefile must be specified for LAM")

        if machines is None and nr is None:
            error("Either machinefile or Nr of CPUs must be specified for MPI type", foamMPI())

        self.mFile = machines   # path of the machine-file (or None)
        self.procNr = nr        # requested number of processes (or None)

        self.boot()
        if not self.machineOK():
            error("Error: LAM was not started")

    def machineOK(self):
        """Check whether the LAM machine was properly booted
        @return: True if the machine is usable"""
        if self.running:
            if foamMPI() == "LAM":
                # a successfully booted LAM machine must report at least one CPU
                if self.cpuNr() <= 0:
                    self.running = False

        return self.running

    def stop(self):
        """Stops a LAM-machine (if one is running)"""
        self.running = False
        if foamMPI() == "LAM":
            self.execute("lamhalt -v")

    def boot(self):
        """Boots a LAM-machine using the machine-file.

        For OpenMPI nothing needs to be started; the machine is simply
        marked as running."""
        if foamMPI() == "LAM":
            warning("LAM is untested. Any Feedback most welcome")
            self.execute("lamboot -s -v " + self.mFile)
            self.running = True
        elif foamMPI() == "OPENMPI" or foamMPI() == "SYSTEMOPENMPI":
            self.running = True
        else:
            error(" Unknown or missing MPI-Implementation: " + foamMPI())

    def cpuNr(self):
        """Determine the number of CPUs the machine can use
        @return: the CPU count (-1 if a LAM machine is not running)"""
        if foamMPI() == "LAM":
            if self.running:
                # 'lamnodes' lines look like 'n0:2:...' - sum up the CPU field
                nr = 0
                for line in self.execute("lamnodes"):
                    tmp = line.split(':')
                    if len(tmp) > 1:
                        nr += int(tmp[1])
                return nr
            else:
                return -1
        elif foamMPI() == "OPENMPI" or foamMPI() == "SYSTEMOPENMPI":
            if self.mFile:
                # count the nodes listed in the machine-file (one per line)
                f = open(self.mFile)
                try:
                    lines = [l.strip() for l in f.readlines()]
                finally:
                    f.close()
                nr = 0
                for m in lines:
                    tmp = m.split()
                    if len(tmp) == 1:
                        nr += 1
                    elif len(tmp) == 0:
                        pass
                    else:
                        error("Machinefile not valid (I think): more than one element in one line:" + str(tmp) + "\nPropably you wrote a line in the form 'node1 cpu=2', but I only understand 'node1\\nnode1'")

                if self.procNr is None:
                    return nr
                else:
                    # never report more processes than were requested
                    return min(nr, self.procNr)
            elif self.procNr:
                return self.procNr
            else:
                error("Can't determine Nr of CPUs without machinefile")

    def buildMPIrun(self, argv, expandApplication=True):
        """Builds a list with a working mpirun command (for that MPI-Implementation)
        @param argv: the original arguments that are to be wrapped
        @param expandApplication: Expand the application name to a full path using 'which'
        @return: list with the correct mpirun-command"""

        nr = str(self.cpuNr())
        mpirun = [config().get("MPI", "run_" + foamMPI(), default="mpirun")]

        # NOTE(review): eval() on configuration values is only safe as long
        # as the configuration files are trusted
        mpirun += eval(config().get("MPI", "options_" + foamMPI() + "_pre", default="[]"))

        if foamMPI() == "LAM":
            mpirun += ["-np", nr]
        elif foamMPI() == "OPENMPI" or foamMPI() == "SYSTEMOPENMPI":
            options = []
            if "MPI_ARCH_PATH" in environ and config().getboolean("MPI", "OpenMPI_add_prefix"):
                # tell mpirun where the OpenMPI-installation lives
                options += ["--prefix", environ["MPI_ARCH_PATH"]]
            if self.procNr is not None:
                options += ["--n", str(self.procNr)]
            machine = []
            if self.mFile is not None:
                machine = ["--machinefile", self.mFile]
                if config().getdebug("ParallelExecution"):
                    debug("Start of", self.mFile)
                    mf = open(self.mFile)
                    debug("\n" + mf.read())
                    mf.close()  # don't leak the file handle
                    debug("End of", self.mFile)
            mpirun += machine + options
        else:
            error(" Unknown or missing MPI-Implementation for mpirun: " + foamMPI())

        mpirun += eval(config().get("MPI", "options_" + foamMPI() + "_post", default="[]"))

        progname = argv[0]
        if expandApplication:
            stat, progname = commands.getstatusoutput('which ' + progname)
            if stat:
                # 'which' failed - fall back to the bare application name
                progname = argv[0]
                warning("which can not find a match for", progname, ". Hoping for the best")

        if oldAppConvention():
            # old calling convention: <app> <root> <case> -parallel <rest>
            mpirun += [progname] + argv[1:3] + ["-parallel"] + argv[3:]
        else:
            mpirun += [progname] + argv[1:] + ["-parallel"]

        if config().getdebug("ParallelExecution"):
            debug("MPI:", foamMPI())
            debug("Arguments:", mpirun)
            system("which mpirun")
            system("which rsh")
            debug("Environment", environ)
            for a in mpirun:
                if a in environ:
                    debug("Transfering variable", a, "with value", environ[a])

        return mpirun

    def writeMetis(self, sDir):
        """Write the parameter-File for a metis decomposition
        @param sDir: Solution directory
        @type sDir: PyFoam.RunDictionary.SolutionDirectory"""

        params = "method metis;\n"

        self.writeDecomposition(sDir, params)

    def writeSimple(self, sDir, direction):
        """Write the parameter-File for a simple decomposition
        @param sDir: Solution directory
        @type sDir: PyFoam.RunDictionary.SolutionDirectory
        @param direction: direction in which to decompose (0=x, 1=y, 2=z)"""

        # all CPUs along the requested axis, 1 along the other two
        n = [str(self.cpuNr()) if direction == i else "1" for i in range(3)]

        params = "method simple;\n"
        params += "\nsimpleCoeffs\n{\n\t n \t ("
        params += " ".join(n)
        params += ");\n\t delta \t 0.001;\n}\n"

        self.writeDecomposition(sDir, params)

    def writeDecomposition(self, sDir, par):
        """Write parameter file for a decomposition
        @param par: Parameters specific for that kind of decomposition
        @type par: str
        @param sDir: Solution directory
        @type sDir: PyFoam.RunDictionary.SolutionDirectory"""

        f = open(path.join(sDir.systemDir(), "decomposeParDict"), "w")
        try:
            self.writeDictionaryHeader(f)
            f.write("// * * * * * * * * * //\n\n")
            f.write("numberOfSubdomains " + str(self.cpuNr()) + ";\n\n")
            f.write(par)
            f.write("\n\n// * * * * * * * * * //")
        finally:
            f.close()  # close the dictionary even if one of the writes fails
205