!!
!! Copyright (c) 2005 The University of Notre Dame. All Rights Reserved.
!!
!! The University of Notre Dame grants you ("Licensee") a
!! non-exclusive, royalty free, license to use, modify and
!! redistribute this software in source and binary code form, provided
!! that the following conditions are met:
!!
!! 1. Acknowledgement of the program authors must be made in any
!!    publication of scientific results based in part on use of the
!!    program.  An acceptable form of acknowledgement is citation of
!!    the article in which the program was described (Matthew
!!    A. Meineke, Charles F. Vardeman II, Teng Lin, Christopher
!!    J. Fennell and J. Daniel Gezelter, "OOPSE: An Object-Oriented
!!    Parallel Simulation Engine for Molecular Dynamics,"
!!    J. Comput. Chem. 26, pp. 252-271 (2005))
!!
!! 2. Redistributions of source code must retain the above copyright
!!    notice, this list of conditions and the following disclaimer.
!!
!! 3. Redistributions in binary form must reproduce the above copyright
!!    notice, this list of conditions and the following disclaimer in the
!!    documentation and/or other materials provided with the
!!    distribution.
!!
!! This software is provided "AS IS," without a warranty of any
!! kind. All express or implied conditions, representations and
!! warranties, including any implied warranty of merchantability,
!! fitness for a particular purpose or non-infringement, are hereby
!! excluded.  The University of Notre Dame and its licensors shall not
!! be liable for any damages suffered by licensee as a result of
!! using, modifying or distributing the software or its
!! derivatives. In no event will the University of Notre Dame or its
!! licensors be liable for any lost revenue, profit or data, or for
!! direct, indirect, special, consequential, incidental or punitive
!! damages, however caused and regardless of the theory of liability,
!! arising out of the use of or inability to use software, even if the
!! University of Notre Dame has been advised of the possibility of
!! such damages.
!!


!! MPI support for long range forces using force decomposition
!! on a square grid of processors.
!! Corresponds to mpiSimulation.cpp for C++
!! mpi_module also contains a private interface for mpi f90 routines.
!!
!! @author Charles F. Vardeman II
!! @author Matthew Meineke
!! @version $Id: simParallel.F90,v 1.12 2009-05-19 20:21:59 gezelter Exp $, $Date: 2009-05-19 20:21:59 $, $Name: not supported by cvs2svn $, $Revision: 1.12 $

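!! Layout sketch (illustrative, following the description above): with P
!! processors arranged as an nRows x nColumns grid, each processor owns
!! one block of the (row atoms) x (column atoms) interaction matrix.
!! Gathers replicate local data along a processor's row or column;
!! scatters sum the partial results back onto the owning processor.
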
module mpiSimulation
  use definitions
  use status
#ifdef IS_MPI
  use oopseMPI
  implicit none
  PRIVATE
#endif


  !! Include mpiComponentPlan type. mpiComponentPlan is a
  !! dual header file for both c and fortran.
#define __FORTRAN90
#include "UseTheForce/mpiComponentPlan.h"

#ifdef IS_MPI

  !! PUBLIC Subroutines contained in this module
  !! gather and scatter are a generic interface
  !! to gather and scatter routines
  public :: gather, scatter
  public :: setupSimParallel
  public :: replanSimParallel
  public :: getNatomsInCol
  public :: getNatomsInRow
  public :: getNgroupsInCol
  public :: getNgroupsInRow
  public :: isMPISimSet
  public :: printComponentPlan
  public :: getMyNode

  !! PUBLIC Subroutines contained in MPI module
  public :: mpi_bcast
  public :: mpi_allreduce
  ! public :: mpi_reduce
  public :: mpi_send
  public :: mpi_recv
  public :: mpi_get_processor_name
  public :: mpi_finalize

  !! PUBLIC mpi variables
  public :: mpi_comm_world
  public :: mpi_character
  public :: mpi_integer
  public :: mpi_lor
  public :: mpi_logical
  public :: mpi_real
  public :: mpi_double_precision
  public :: mpi_sum
  public :: mpi_max
  public :: mpi_status_size
  public :: mpi_any_source



  !! Safety logical to prevent access to ComponentPlan until
  !! set by C++.
  logical, save :: ComponentPlanSet = .false.


  !! generic mpi error declaration.
  integer, public :: mpi_err
  character(len = statusMsgSize) :: errMsg

#ifdef PROFILE
  public :: printCommTime
  public :: getCommTime
  real, save :: commTime = 0.0
  real :: commTimeInitial, commTimeFinal
#endif


  !! Tags used during force loop for parallel simulation
  integer, public, allocatable, dimension(:) :: AtomLocalToGlobal
  integer, public, allocatable, dimension(:) :: AtomRowToGlobal
  integer, public, allocatable, dimension(:) :: AtomColToGlobal
  integer, public, allocatable, dimension(:) :: AtomGlobalToLocal
  integer, public, allocatable, dimension(:) :: GroupLocalToGlobal
  integer, public, allocatable, dimension(:) :: GroupRowToGlobal
  integer, public, allocatable, dimension(:) :: GroupColToGlobal

  !! Logical set true if mpiSimulation has been initialized
  logical, save :: isSimSet = .false.


  type (mpiComponentPlan), save :: mpiSim

  !! gs_plan contains plans for gather and scatter routines
  type, public :: gs_plan
     private
     type (mpiComponentPlan), pointer :: gsComponentPlan => NULL()
     integer :: gsPlanSize      !! size of this plan (nDim*nComponents)
     integer :: globalPlanSize  !! size of all components in plan
     integer, dimension(:), pointer :: displs  !! Displacements array for mpi indexed from 0.
     integer, dimension(:), pointer :: counts  !! Counts array for mpi indexed from 0.
     integer :: myPlanComm   !! My communicator for this plan
     integer :: myPlanRank   !! My rank in this plan
     integer :: planNprocs   !! Number of processors in this plan
  end type gs_plan

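  !! Bookkeeping example (illustrative values): for plan_atom_row_3d on
  !! a processor with nAtomsLocal = 100, gsPlanSize = nDim * 100 = 300;
  !! counts(:) holds every row member's gsPlanSize, displs(:) holds the
  !! running offsets into the gathered array, and
  !! globalPlanSize = sum(counts) = nDim * nAtomsInRow.
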
  ! plans for different decompositions
  type (gs_plan), public, save :: plan_atom_row
  type (gs_plan), public, save :: plan_atom_row_3d
  type (gs_plan), public, save :: plan_atom_col
  type (gs_plan), public, save :: plan_atom_col_3d
  type (gs_plan), public, save :: plan_atom_row_Rotation
  type (gs_plan), public, save :: plan_atom_col_Rotation
  type (gs_plan), public, save :: plan_group_row
  type (gs_plan), public, save :: plan_group_col
  type (gs_plan), public, save :: plan_group_row_3d
  type (gs_plan), public, save :: plan_group_col_3d

  type (mpiComponentPlan), pointer :: simComponentPlan

  ! interface for gather scatter routines
  !! Generic interface for gather.
  !! Gathers a local array into a row or column array.
  !! Interfaces are provided for integer, double, and double
  !! rank 2 arrays.
  interface gather
     module procedure gather_integer
     module procedure gather_double
     module procedure gather_double_2d
  end interface

  !! Generic interface for scatter.
  !! Scatters a row or column array, adding components
  !! and reducing them to a local nComponent array.
  !! Interfaces are provided for double and double rank=2 arrays.

  interface scatter
     module procedure scatter_double
     module procedure scatter_double_2d
  end interface

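  !! Typical round trip through these interfaces (illustrative only;
  !! q_local, q_Row, f_Row and f_local are hypothetical arrays, not
  !! declared in this module):
  !!
  !!    call gather(q_local, q_Row, plan_atom_row_3d)   ! replicate along the row
  !!    ! ... accumulate partial forces into f_Row ...
  !!    call scatter(f_Row, f_local, plan_atom_row_3d)  ! sum-reduce back to owners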


contains

  !! Sets up mpiComponentPlan with structure passed from C++.
  subroutine setupSimParallel(thisComponentPlan, nAtomTags, atomTags, &
       nGroupTags, groupTags, status)
    !! Passed Arguments
    !! mpiComponentPlan struct from C
    type (mpiComponentPlan), intent(inout) :: thisComponentPlan
    !! Number of tags passed
    integer, intent(in) :: nAtomTags, nGroupTags
    !! Result status, 0 = normal, -1 = error
    integer, intent(out) :: status
    integer :: localStatus
    !! Global reference tag for local particles
    integer, dimension(nAtomTags), intent(inout) :: atomTags
    integer, dimension(nGroupTags), intent(inout) :: groupTags

    !write(*,*) 'mpiSim_mod thinks node', thisComponentPlan%myNode, &
    !     ' has atomTags(1) = ', atomTags(1)

    status = 0
    if (componentPlanSet) then
       return
    endif
    componentPlanSet = .true.

    !! copy c component plan to fortran
    mpiSim = thisComponentPlan

    call make_Force_Grid(mpiSim, localStatus)
    if (localStatus /= 0) then
       write(errMsg, *) 'An error in making the force grid has occurred'
       call handleError("setupSimParallel", errMsg)
       status = -1
       return
    endif

    call updateGridComponents(mpiSim, localStatus)
    if (localStatus /= 0) then
       write(errMsg, *) "Error updating grid components"
       call handleError("setupSimParallel", errMsg)
       status = -1
       return
    endif

    !! initialize gather and scatter plans used in this simulation
    call plan_gather_scatter(1, mpiSim%nAtomsLocal, &
         mpiSim, mpiSim%rowComm, plan_atom_row)
    call plan_gather_scatter(nDim, mpiSim%nAtomsLocal, &
         mpiSim, mpiSim%rowComm, plan_atom_row_3d)
    call plan_gather_scatter(9, mpiSim%nAtomsLocal, &
         mpiSim, mpiSim%rowComm, plan_atom_row_Rotation)
    call plan_gather_scatter(1, mpiSim%nGroupsLocal, &
         mpiSim, mpiSim%rowComm, plan_group_row)
    call plan_gather_scatter(nDim, mpiSim%nGroupsLocal, &
         mpiSim, mpiSim%rowComm, plan_group_row_3d)

    call plan_gather_scatter(1, mpiSim%nAtomsLocal, &
         mpiSim, mpiSim%columnComm, plan_atom_col)
    call plan_gather_scatter(nDim, mpiSim%nAtomsLocal, &
         mpiSim, mpiSim%columnComm, plan_atom_col_3d)
    call plan_gather_scatter(9, mpiSim%nAtomsLocal, &
         mpiSim, mpiSim%columnComm, plan_atom_col_Rotation)
    call plan_gather_scatter(1, mpiSim%nGroupsLocal, &
         mpiSim, mpiSim%columnComm, plan_group_col)
    call plan_gather_scatter(nDim, mpiSim%nGroupsLocal, &
         mpiSim, mpiSim%columnComm, plan_group_col_3d)

    ! Initialize tags

    call setAtomTags(atomTags, localStatus)
    if (localStatus /= 0) then
       write(errMsg, *) 'An error in setting Atom Tags has occurred'
       call handleError("setupSimParallel", errMsg)
       status = -1
       return
    endif


    call setGroupTags(groupTags, localStatus)
    if (localStatus /= 0) then
       write(errMsg, *) 'An error in setting Group Tags has occurred'
       call handleError("setupSimParallel", errMsg)
       status = -1
       return
    endif

    isSimSet = .true.

    ! call printComponentPlan(mpiSim, 0)
  end subroutine setupSimParallel
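
  !! Call-order note: setupSimParallel runs once per simulation; the
  !! componentPlanSet guard turns any repeat call into a no-op.  To
  !! rebuild the gather/scatter plans after the per-processor atom or
  !! group counts change, use replanSimParallel below instead.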

  subroutine replanSimParallel(thisComponentPlan, status)
    ! Passed Arguments
    !! mpiComponentPlan struct from C
    type (mpiComponentPlan), intent(inout) :: thisComponentPlan
    integer, intent(out) :: status
    integer :: localStatus
    integer :: mpierror
    status = 0

    call updateGridComponents(thisComponentPlan, localStatus)
    if (localStatus /= 0) then
       status = -1
       return
    endif

    !! Unplan Gather Scatter plans
    call unplan_gather_scatter(plan_atom_row)
    call unplan_gather_scatter(plan_atom_row_3d)
    call unplan_gather_scatter(plan_atom_row_Rotation)
    call unplan_gather_scatter(plan_group_row)
    call unplan_gather_scatter(plan_group_row_3d)

    call unplan_gather_scatter(plan_atom_col)
    call unplan_gather_scatter(plan_atom_col_3d)
    call unplan_gather_scatter(plan_atom_col_Rotation)
    call unplan_gather_scatter(plan_group_col)
    call unplan_gather_scatter(plan_group_col_3d)

    !! initialize gather and scatter plans used in this simulation
    call plan_gather_scatter(1, mpiSim%nAtomsLocal, &
         mpiSim, mpiSim%rowComm, plan_atom_row)
    call plan_gather_scatter(nDim, mpiSim%nAtomsLocal, &
         mpiSim, mpiSim%rowComm, plan_atom_row_3d)
    call plan_gather_scatter(9, mpiSim%nAtomsLocal, &
         mpiSim, mpiSim%rowComm, plan_atom_row_Rotation)
    call plan_gather_scatter(1, mpiSim%nGroupsLocal, &
         mpiSim, mpiSim%rowComm, plan_group_row)
    call plan_gather_scatter(nDim, mpiSim%nGroupsLocal, &
         mpiSim, mpiSim%rowComm, plan_group_row_3d)

    call plan_gather_scatter(1, mpiSim%nAtomsLocal, &
         mpiSim, mpiSim%columnComm, plan_atom_col)
    call plan_gather_scatter(nDim, mpiSim%nAtomsLocal, &
         mpiSim, mpiSim%columnComm, plan_atom_col_3d)
    call plan_gather_scatter(9, mpiSim%nAtomsLocal, &
         mpiSim, mpiSim%columnComm, plan_atom_col_Rotation)
    call plan_gather_scatter(1, mpiSim%nGroupsLocal, &
         mpiSim, mpiSim%columnComm, plan_group_col)
    call plan_gather_scatter(nDim, mpiSim%nGroupsLocal, &
         mpiSim, mpiSim%columnComm, plan_group_col_3d)

  end subroutine replanSimParallel

  !! Updates number of row and column components for long range forces.
  subroutine updateGridComponents(thisComponentPlan, status)
    type (mpiComponentPlan) :: thisComponentPlan !! mpiComponentPlan

    !! Status return
    !! - 0 Success
    !! - -1 Failure
    integer, intent(out) :: status
    integer :: nAtomsLocal
    integer :: nAtomsInRow = 0
    integer :: nAtomsInColumn = 0
    integer :: nGroupsLocal
    integer :: nGroupsInRow = 0
    integer :: nGroupsInColumn = 0
    integer :: mpiErrors

    status = 0
    if (.not. componentPlanSet) return

    if (thisComponentPlan%nAtomsLocal == 0) then
       status = -1
       return
    endif
    if (thisComponentPlan%nGroupsLocal == 0) then
       write(*,*) 'tcp%ngl = ', thisComponentPlan%nGroupsLocal
       status = -1
       return
    endif

    nAtomsLocal = thisComponentPlan%nAtomsLocal
    nGroupsLocal = thisComponentPlan%nGroupsLocal

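    ! Sum the per-processor counts across each communicator: after these
    ! allreduce calls every member of a row knows the total number of
    ! atoms (and groups) held along that row, and likewise for columns.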
    call mpi_allreduce(nAtomsLocal, nAtomsInRow, 1, mpi_integer, &
         mpi_sum, thisComponentPlan%rowComm, mpiErrors)
    if (mpiErrors /= 0) then
       status = -1
       return
    endif

    call mpi_allreduce(nAtomsLocal, nAtomsInColumn, 1, mpi_integer, &
         mpi_sum, thisComponentPlan%columnComm, mpiErrors)
    if (mpiErrors /= 0) then
       status = -1
       return
    endif

    call mpi_allreduce(nGroupsLocal, nGroupsInRow, 1, mpi_integer, &
         mpi_sum, thisComponentPlan%rowComm, mpiErrors)
    if (mpiErrors /= 0) then
       status = -1
       return
    endif

    call mpi_allreduce(nGroupsLocal, nGroupsInColumn, 1, mpi_integer, &
         mpi_sum, thisComponentPlan%columnComm, mpiErrors)
    if (mpiErrors /= 0) then
       status = -1
       return
    endif

    thisComponentPlan%nAtomsInRow = nAtomsInRow
    thisComponentPlan%nAtomsInColumn = nAtomsInColumn
    thisComponentPlan%nGroupsInRow = nGroupsInRow
    thisComponentPlan%nGroupsInColumn = nGroupsInColumn

  end subroutine updateGridComponents


  !! Creates a square force decomposition of processors into row and column
  !! communicators.
  subroutine make_Force_Grid(thisComponentPlan, status)
    type (mpiComponentPlan) :: thisComponentPlan
    integer, intent(out) :: status       !! status returns -1 if error
    integer :: nColumnsMax       !! Maximum number of columns
    integer :: nWorldProcessors  !! Total number of processors in World comm.
    integer :: rowIndex          !! Row for this processor.
    integer :: columnIndex       !! Column for this processor.
    integer :: nRows             !! Total number of rows.
    integer :: nColumns          !! Total number of columns.
    integer :: mpiErrors         !! MPI errors.
    integer :: rowCommunicator   !! MPI row communicator.
    integer :: columnCommunicator
    integer :: myWorldRank
    integer :: i


    status = 0
    if (.not. ComponentPlanSet) return

    !! We make a dangerous assumption here that if numberProcessors is
    !! zero, then we need to get the information from MPI.
    if (thisComponentPlan%nProcessors == 0) then
       call mpi_comm_size(MPI_COMM_WORLD, nWorldProcessors, mpiErrors)
       if (mpiErrors /= 0) then
          status = -1
          return
       endif
       call mpi_comm_rank(MPI_COMM_WORLD, myWorldRank, mpiErrors)
       if (mpiErrors /= 0) then
          status = -1
          return
       endif

    else
       nWorldProcessors = thisComponentPlan%nProcessors
       myWorldRank = thisComponentPlan%myNode
    endif

    nColumnsMax = nint(sqrt(real(nWorldProcessors, kind=dp)))

    do i = 1, nColumnsMax
       if (mod(nWorldProcessors, i) == 0) nColumns = i
    end do
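
    ! Worked example (illustrative): nWorldProcessors = 6 gives
    ! nColumnsMax = nint(sqrt(6.0)) = 2, so the divisor loop leaves
    ! nColumns = 2 and nRows = 6/2 = 3.  Rank 4 then gets
    ! rowIndex = 4/2 = 2 and columnIndex = mod(4,2) = 0.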

    nRows = nWorldProcessors/nColumns

    rowIndex = myWorldRank/nColumns

    call mpi_comm_split(mpi_comm_world, rowIndex, 0, rowCommunicator, mpiErrors)
    if (mpiErrors /= 0) then
       write(errMsg, *) 'An error ', mpiErrors, ' occurred in splitting communicators'
       call handleError("makeForceGrid", errMsg)
       status = -1
       return
    endif

    columnIndex = mod(myWorldRank, nColumns)
    call mpi_comm_split(mpi_comm_world, columnIndex, 0, columnCommunicator, mpiErrors)
    if (mpiErrors /= 0) then
       write(errMsg, *) "MPI comm split failed at columnCommunicator with error ", mpiErrors
       call handleError("makeForceGrid", errMsg)
       status = -1
       return
    endif

    ! Set appropriate components of thisComponentPlan
    thisComponentPlan%rowComm = rowCommunicator
    thisComponentPlan%columnComm = columnCommunicator
    thisComponentPlan%rowIndex = rowIndex
    thisComponentPlan%columnIndex = columnIndex
    thisComponentPlan%nRows = nRows
    thisComponentPlan%nColumns = nColumns

  end subroutine make_Force_Grid

  !! initializes a gather scatter plan
  subroutine plan_gather_scatter(nDim, nObjects, thisComponentPlan, &
       thisComm, this_plan, status)
    integer, intent(in) :: nDim !! Number of dimensions for gather scatter plan
    integer, intent(in) :: nObjects
    type (mpiComponentPlan), intent(in), target :: thisComponentPlan
    type (gs_plan), intent(out) :: this_plan !! MPI Component Plan
    integer, intent(in) :: thisComm !! MPI communicator for this plan

    integer :: arraySize !! size to allocate plan for
    integer, intent(out), optional :: status
    integer :: ierror
    integer :: i, junk

    if (present(status)) status = 0

    !! Set gsComponentPlan pointer
    !! to the component plan we want to use for this gather scatter plan.
    !! WARNING: this could be dangerous since thisComponentPlan was originally
    !! allocated in C++ and there is a significant difference between c and
    !! f95 pointers....
    this_plan%gsComponentPlan => thisComponentPlan

    ! Set this plan size for displs array.
    this_plan%gsPlanSize = nDim * nObjects

    ! Duplicate communicator for this plan
    call mpi_comm_dup(thisComm, this_plan%myPlanComm, mpi_err)
    if (mpi_err /= 0) then
       if (present(status)) status = -1
       return
    end if
    call mpi_comm_rank(this_plan%myPlanComm, this_plan%myPlanRank, mpi_err)
    if (mpi_err /= 0) then
       if (present(status)) status = -1
       return
    end if

    call mpi_comm_size(this_plan%myPlanComm, this_plan%planNprocs, mpi_err)

    if (mpi_err /= 0) then
       if (present(status)) status = -1
       return
    end if

    !! counts and displacements arrays are indexed from 0 to be compatible
    !! with MPI arrays.
    allocate (this_plan%counts(0:this_plan%planNprocs-1), STAT=ierror)
    if (ierror /= 0) then
       if (present(status)) status = -1
       return
    end if

    allocate (this_plan%displs(0:this_plan%planNprocs-1), STAT=ierror)
    if (ierror /= 0) then
       if (present(status)) status = -1
       return
    end if

    !! gather all the local sizes into a size # processors array.
    call mpi_allgather(this_plan%gsPlanSize, 1, mpi_integer, this_plan%counts, &
         1, mpi_integer, thisComm, mpi_err)

    if (mpi_err /= 0) then
       if (present(status)) status = -1
       return
    end if

    !! figure out the total number of particles in this plan
    this_plan%globalPlanSize = sum(this_plan%counts)

    !! initialize plan displacements.
    this_plan%displs(0) = 0
    do i = 1, this_plan%planNprocs - 1, 1
       this_plan%displs(i) = this_plan%displs(i-1) + this_plan%counts(i-1)
    end do
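
    ! Example (illustrative values): if the three members of this plan
    ! report counts = (/ 6, 9, 3 /), the loop above yields
    ! displs = (/ 0, 6, 15 /) and globalPlanSize = 18, which are exactly
    ! the offsets and total that mpi_allgatherv and mpi_reduce_scatter
    ! expect.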
  end subroutine plan_gather_scatter

  subroutine unplan_gather_scatter(this_plan)
    type (gs_plan), intent(inout) :: this_plan

    this_plan%gsComponentPlan => null()
    call mpi_comm_free(this_plan%myPlanComm, mpi_err)

    deallocate(this_plan%counts)
    deallocate(this_plan%displs)

  end subroutine unplan_gather_scatter

  subroutine gather_integer(sbuffer, rbuffer, this_plan, status)

    type (gs_plan), intent(inout) :: this_plan
    integer, dimension(:), intent(inout) :: sbuffer
    integer, dimension(:), intent(inout) :: rbuffer
    integer :: noffset
    integer, intent(out), optional :: status
    integer :: i

    if (present(status)) status = 0
    noffset = this_plan%displs(this_plan%myPlanRank)

    ! if (getmyNode() == 1) then
    !    write(*,*) "Node 0 printing allgatherv vars"
    !    write(*,*) "Noffset: ", noffset
    !    write(*,*) "PlanSize: ", this_plan%gsPlanSize
    !    write(*,*) "PlanComm: ", this_plan%myPlanComm
    ! end if

    call mpi_allgatherv(sbuffer, this_plan%gsPlanSize, mpi_integer, &
         rbuffer, this_plan%counts, this_plan%displs, mpi_integer, &
         this_plan%myPlanComm, mpi_err)

    if (mpi_err /= 0) then
       write(errMsg, *) "mpi_allgatherv failed with error ", mpi_err
       call handleError("gather_integer", errMsg)
       if (present(status)) status = -1
    endif

  end subroutine gather_integer

  subroutine gather_double(sbuffer, rbuffer, this_plan, status)

    type (gs_plan), intent(in) :: this_plan
    real( kind = DP ), dimension(:), intent(inout) :: sbuffer
    real( kind = DP ), dimension(:), intent(inout) :: rbuffer
    integer :: noffset
    integer, intent(out), optional :: status


    if (present(status)) status = 0
    noffset = this_plan%displs(this_plan%myPlanRank)
#ifdef PROFILE
    call cpu_time(commTimeInitial)
#endif
#ifdef SINGLE_PRECISION
    call mpi_allgatherv(sbuffer, this_plan%gsPlanSize, mpi_real, &
         rbuffer, this_plan%counts, this_plan%displs, mpi_real, &
         this_plan%myPlanComm, mpi_err)
#else
    call mpi_allgatherv(sbuffer, this_plan%gsPlanSize, mpi_double_precision, &
         rbuffer, this_plan%counts, this_plan%displs, mpi_double_precision, &
         this_plan%myPlanComm, mpi_err)
#endif
#ifdef PROFILE
    call cpu_time(commTimeFinal)
    commTime = commTime + commTimeFinal - commTimeInitial
#endif

    if (mpi_err /= 0) then
       write(errMsg, *) "mpi_allgatherv failed with error ", mpi_err
       call handleError("gather_double", errMsg)
       if (present(status)) status = -1
    endif

  end subroutine gather_double

  subroutine gather_double_2d(sbuffer, rbuffer, this_plan, status)

    type (gs_plan), intent(in) :: this_plan
    real( kind = DP ), dimension(:,:), intent(inout) :: sbuffer
    real( kind = DP ), dimension(:,:), intent(inout) :: rbuffer
    integer :: noffset, i, ierror
    integer, intent(out), optional :: status

    external mpi_allgatherv

    if (present(status)) status = 0

    ! noffset = this_plan%displs(this_plan%me)
#ifdef PROFILE
    call cpu_time(commTimeInitial)
#endif

#ifdef SINGLE_PRECISION
    call mpi_allgatherv(sbuffer, this_plan%gsPlanSize, mpi_real, &
         rbuffer, this_plan%counts, this_plan%displs, mpi_real, &
         this_plan%myPlanComm, mpi_err)
#else
    call mpi_allgatherv(sbuffer, this_plan%gsPlanSize, mpi_double_precision, &
         rbuffer, this_plan%counts, this_plan%displs, mpi_double_precision, &
         this_plan%myPlanComm, mpi_err)
#endif

#ifdef PROFILE
    call cpu_time(commTimeFinal)
    commTime = commTime + commTimeFinal - commTimeInitial
#endif

    if (mpi_err /= 0) then
       write(errMsg, *) "mpi_allgatherv failed with error ", mpi_err
       call handleError("gather_double_2d", errMsg)
       if (present(status)) status = -1
    endif

  end subroutine gather_double_2d

  subroutine scatter_double(sbuffer, rbuffer, this_plan, status)

    type (gs_plan), intent(in) :: this_plan
    real( kind = DP ), dimension(:), intent(inout) :: sbuffer
    real( kind = DP ), dimension(:), intent(inout) :: rbuffer
    integer, intent(out), optional :: status
    external mpi_reduce_scatter

    if (present(status)) status = 0

#ifdef PROFILE
    call cpu_time(commTimeInitial)
#endif
#ifdef SINGLE_PRECISION
    call mpi_reduce_scatter(sbuffer, rbuffer, this_plan%counts, &
         mpi_real, MPI_SUM, this_plan%myPlanComm, mpi_err)
#else
    call mpi_reduce_scatter(sbuffer, rbuffer, this_plan%counts, &
         mpi_double_precision, MPI_SUM, this_plan%myPlanComm, mpi_err)
#endif
#ifdef PROFILE
    call cpu_time(commTimeFinal)
    commTime = commTime + commTimeFinal - commTimeInitial
#endif

    if (mpi_err /= 0) then
       write(errMsg, *) "mpi_reduce_scatter failed with error ", mpi_err
       call handleError("scatter_double", errMsg)
       if (present(status)) status = -1
    endif

  end subroutine scatter_double

  subroutine scatter_double_2d(sbuffer, rbuffer, this_plan, status)

    type (gs_plan), intent(in) :: this_plan
    real( kind = DP ), dimension(:,:), intent(inout) :: sbuffer
    real( kind = DP ), dimension(:,:), intent(inout) :: rbuffer
    integer, intent(out), optional :: status
    external mpi_reduce_scatter

    if (present(status)) status = 0
#ifdef PROFILE
    call cpu_time(commTimeInitial)
#endif
#ifdef SINGLE_PRECISION
    call mpi_reduce_scatter(sbuffer, rbuffer, this_plan%counts, &
         mpi_real, MPI_SUM, this_plan%myPlanComm, mpi_err)
#else
    call mpi_reduce_scatter(sbuffer, rbuffer, this_plan%counts, &
         mpi_double_precision, MPI_SUM, this_plan%myPlanComm, mpi_err)
#endif
#ifdef PROFILE
    call cpu_time(commTimeFinal)
    commTime = commTime + commTimeFinal - commTimeInitial
#endif

    if (mpi_err /= 0) then
       write(errMsg, *) "mpi_reduce_scatter failed with error ", mpi_err
       call handleError("scatter_double_2d", errMsg)
       if (present(status)) status = -1
    endif

  end subroutine scatter_double_2d

  subroutine setAtomTags(tags, status)
    integer, dimension(:) :: tags
    integer :: status

    integer :: alloc_stat

    integer :: nAtomsInCol
    integer :: nAtomsInRow
    integer :: i, glob, nAtomTags, maxGlobal


    status = 0
    ! allocate row arrays
    nAtomsInRow = getNatomsInRow(plan_atom_row)
    nAtomsInCol = getNatomsInCol(plan_atom_col)

    if (.not. allocated(AtomLocalToGlobal)) then
       allocate(AtomLocalToGlobal(size(tags)), STAT=alloc_stat)
       if (alloc_stat /= 0) then
          status = -1
          return
       endif
    else
       deallocate(AtomLocalToGlobal)
       allocate(AtomLocalToGlobal(size(tags)), STAT=alloc_stat)
       if (alloc_stat /= 0) then
          status = -1
          return
       endif
    endif

    AtomLocalToGlobal = tags

    if (.not. allocated(AtomRowToGlobal)) then
       allocate(AtomRowToGlobal(nAtomsInRow), STAT=alloc_stat)
       if (alloc_stat /= 0) then
          status = -1
          return
       endif
    else
       deallocate(AtomRowToGlobal)
       allocate(AtomRowToGlobal(nAtomsInRow), STAT=alloc_stat)
       if (alloc_stat /= 0) then
          status = -1
          return
       endif
    endif
    ! allocate column arrays
    if (.not. allocated(AtomColToGlobal)) then
       allocate(AtomColToGlobal(nAtomsInCol), STAT=alloc_stat)
       if (alloc_stat /= 0) then
          status = -1
          return
       endif
    else
       deallocate(AtomColToGlobal)
       allocate(AtomColToGlobal(nAtomsInCol), STAT=alloc_stat)
       if (alloc_stat /= 0) then
          status = -1
          return
       endif
    endif

    call gather(tags, AtomRowToGlobal, plan_atom_row)
    call gather(tags, AtomColToGlobal, plan_atom_col)

    nAtomTags = size(tags)
    maxGlobal = -1
    do i = 1, nAtomTags
       if (tags(i) .gt. maxGlobal) maxGlobal = tags(i)
    enddo

    if (.not. allocated(AtomGlobalToLocal)) then
       allocate(AtomGlobalToLocal(maxGlobal), STAT=alloc_stat)
       if (alloc_stat /= 0) then
          status = -1
          return
       endif
    else
       deallocate(AtomGlobalToLocal)
       allocate(AtomGlobalToLocal(maxGlobal), STAT=alloc_stat)
       if (alloc_stat /= 0) then
          status = -1
          return
       endif
    endif

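    ! Invert the local -> global map.  Illustrative example (values
    ! assumed): if AtomLocalToGlobal = (/ 7, 3, 9 /) on this node, the
    ! loop below sets AtomGlobalToLocal(7) = 1, AtomGlobalToLocal(3) = 2
    ! and AtomGlobalToLocal(9) = 3; entries for global indices that are
    ! not resident here are never written.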
    do i = 1, nAtomTags
       glob = AtomLocalToGlobal(i)
       AtomGlobalToLocal(glob) = i
    enddo

  end subroutine setAtomTags

  subroutine setGroupTags(tags, status)
    integer, dimension(:) :: tags
    integer :: status

    integer :: alloc_stat

    integer :: nGroupsInCol
    integer :: nGroupsInRow

    status = 0

    nGroupsInRow = getNgroupsInRow(plan_group_row)
    nGroupsInCol = getNgroupsInCol(plan_group_col)

    if (allocated(GroupLocalToGlobal)) then
       deallocate(GroupLocalToGlobal)
    endif
    allocate(GroupLocalToGlobal(size(tags)), STAT=alloc_stat)
    if (alloc_stat /= 0) then
       status = -1
       return
    endif

    GroupLocalToGlobal = tags

    if (allocated(GroupRowToGlobal)) then
       deallocate(GroupRowToGlobal)
    endif
    allocate(GroupRowToGlobal(nGroupsInRow), STAT=alloc_stat)
    if (alloc_stat /= 0) then
       status = -1
       return
    endif

    if (allocated(GroupColToGlobal)) then
       deallocate(GroupColToGlobal)
    endif
    allocate(GroupColToGlobal(nGroupsInCol), STAT=alloc_stat)
    if (alloc_stat /= 0) then
       status = -1
       return
    endif

    call gather(tags, GroupRowToGlobal, plan_group_row)
    call gather(tags, GroupColToGlobal, plan_group_col)

  end subroutine setGroupTags

  function getNatomsInCol(thisplan) result(nInCol)
    type (gs_plan), intent(in) :: thisplan
    integer :: nInCol
    nInCol = thisplan%gsComponentPlan%nAtomsInColumn
  end function getNatomsInCol

  function getNatomsInRow(thisplan) result(nInRow)
    type (gs_plan), intent(in) :: thisplan
    integer :: nInRow
    nInRow = thisplan%gsComponentPlan%nAtomsInRow
  end function getNatomsInRow

  function getNgroupsInCol(thisplan) result(nInCol)
    type (gs_plan), intent(in) :: thisplan
    integer :: nInCol
    nInCol = thisplan%gsComponentPlan%nGroupsInColumn
  end function getNgroupsInCol

  function getNgroupsInRow(thisplan) result(nInRow)
    type (gs_plan), intent(in) :: thisplan
    integer :: nInRow
    nInRow = thisplan%gsComponentPlan%nGroupsInRow
  end function getNgroupsInRow

  function isMPISimSet() result(isthisSimSet)
    logical :: isthisSimSet
    isthisSimSet = isSimSet
  end function isMPISimSet

  subroutine printComponentPlan(this_plan, printNode)

    type (mpiComponentPlan), intent(in) :: this_plan
    integer, optional :: printNode
    logical :: print_me = .false.

    if (present(printNode)) then
       if (printNode == mpiSim%myNode) print_me = .true.
    else
       print_me = .true.
    endif

    if (print_me) then
       write(default_error,*) "SetupSimParallel: writing component plan"

       write(default_error,*) "nMolGlobal: ", this_plan%nMolGlobal
       write(default_error,*) "nAtomsGlobal: ", this_plan%nAtomsGlobal
       write(default_error,*) "nAtomsLocal: ", this_plan%nAtomsLocal
       write(default_error,*) "myNode: ", this_plan%myNode
       write(default_error,*) "nProcessors: ", this_plan%nProcessors
       write(default_error,*) "rowComm: ", this_plan%rowComm
       write(default_error,*) "columnComm: ", this_plan%columnComm
       write(default_error,*) "nRows: ", this_plan%nRows
       write(default_error,*) "nColumns: ", this_plan%nColumns
       write(default_error,*) "nAtomsInRow: ", this_plan%nAtomsInRow
       write(default_error,*) "nAtomsInColumn: ", this_plan%nAtomsInColumn
       write(default_error,*) "rowIndex: ", this_plan%rowIndex
       write(default_error,*) "columnIndex: ", this_plan%columnIndex
    endif
  end subroutine printComponentPlan

  function getMyNode() result(myNode)
    integer :: myNode
    myNode = mpiSim%myNode
  end function getMyNode

#ifdef PROFILE
  subroutine printCommTime()
    write(*,*) "MPI communication time is: ", commTime
  end subroutine printCommTime

  function getCommTime() result(comm_time)
    real :: comm_time
    comm_time = commTime
  end function getCommTime

#endif

#endif // is_mpi
end module mpiSimulation