  5    !!
  6    !! @author Charles F. Vardeman II
  7    !! @author Matthew Meineke
  8 <  !! @version $Id: mpiSimulation_module.F90,v 1.1 2002-12-19 21:59:51 chuckv Exp $, $Date: 2002-12-19 21:59:51 $, $Name: not supported by cvs2svn $, $Revision: 1.1 $
  8 >  !! @version $Id: mpiSimulation_module.F90,v 1.6 2003-01-30 22:29:58 chuckv Exp $, $Date: 2003-01-30 22:29:58 $, $Name: not supported by cvs2svn $, $Revision: 1.6 $
  9
 10
 11
 12
 13    module mpiSimulation
 14 <  #ifdef MPI
 14 >  use definitions
 15    use mpi
 16    implicit none
 17    PRIVATE
 23    public :: gather, scatter
 24    public :: setupSimParallel
 25    public :: replanSimParallel
 26 +  public :: getNcol
 27 +  public :: getNrow
 28 +  public :: isMPISimSet
 29 +
 30 +
 31    !! PUBLIC Subroutines contained in MPI module
 32    public :: mpi_bcast
 33    public :: mpi_allreduce
 55    !! generic mpi error declaration.
 56    integer,public :: mpi_err
 57
 58 +
 59 +
 60    !! Include mpiComponentPlan type. mpiComponentPlan is a
 61    !! dual header file for both c and fortran.
 62    #define __FORTRAN90
 63    #include "mpiComponentPlan.h"
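For orientation, the Fortran side of this dual header is a derived type that mirrors the C++ struct member for member. The sketch below is illustrative only, reconstructed from the members referenced in this diff; the authoritative definition (ordering, kinds, and any members not used here) lives in mpiComponentPlan.h.

    !! Illustrative sketch only -- see mpiComponentPlan.h for the real definition.
    type :: mpiComponentPlan
      sequence
      integer :: nMolGlobal, nAtomsGlobal, nBondsGlobal
      integer :: nTorsionsGlobal, nSRIGlobal
      integer :: myMolStart, myMolEnd, myMol, myNlocal, myNode
      integer :: numberProcessors, numberRows, numberColumns
      integer :: nComponentsRow, nComponentsColumn
      integer :: rowComm, columnComm
      integer :: rowIndex, columnIndex
    end type mpiComponentPlan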
 64
 65 +
 66 +
 67 +  !! Tags used during force loop for parallel simulation
 68 +  integer, allocatable, dimension(:) :: tagLocal
 69 +  integer, public, allocatable, dimension(:) :: tagRow
 70 +  integer, public, allocatable, dimension(:) :: tagColumn
 71 +
 72 +  !! Logical set true if mpiSimulation has been initialized
 73 +  logical :: isSimSet = .false.
 74 +
 75 +
 76 +  type (mpiComponentPlan) :: mpiSim
 77 +
 78    !! gs_plan contains plans for gather and scatter routines
 79    type, public :: gs_plan
 80    private
117    end interface
118
119
120 +
121    contains
122
123    !! Sets up mpiComponentPlan with structure passed from C++.
124 <  subroutine setupSimParallel(thisComponentPlan,status)
124 >  subroutine setupSimParallel(thisComponentPlan,ntags,tags,status)
125    ! Passed Arguments
126    ! integer, intent(inout) :: nDim !! Number of dimensions
127    !! mpiComponentPlan struct from C
128 <  type (mpiComponentPlan), intent(inout) :: thisComponentPlan
128 >  type (mpiComponentPlan), intent(inout) :: thisComponentPlan
129 >  !! Number of tags passed, nlocal
130 >  integer, intent(in) :: ntags
131 >  !! Result status, 0 = normal, -1 = error
132    integer, intent(out) :: status
133 <  integer, intnet(out) :: localStatus
133 >  integer :: localStatus
134 >  !! Global reference tag for local particles
135 >  integer, dimension(ntags),intent(inout) :: tags
136
137 +
138    status = 0
139    if (componentPlanSet) then
140    return
141    endif
115 -
142    componentPlanSet = .true.
143 +
144 +  !! copy c component plan to fortran
145 +  mpiSim = thisComponentPlan
146 +  write(*,*) "Setting up simParallel"
147 +  if (mpiSim%myNode == 0) then
148 +  write(*,*) "SetupSimParallel: writing component plan"
149 +
150 +  write(*,*) "nMolGlobal: ", mpiSim%nMolGlobal
151 +  write(*,*) "nAtomsGlobal: ", mpiSim%nAtomsGlobal
152 +  write(*,*) "nBondsGlobal: ", mpiSim%nBondsGlobal
153 +  write(*,*) "nTorsionsGlobal: ", mpiSim%nTorsionsGlobal
154 +  write(*,*) "nSRIGlobal: ", mpiSim%nSRIGlobal
155 +  write(*,*) "myMolStart: ", mpiSim%myMolStart
156 +  write(*,*) "myMolEnd: ", mpiSim%myMolEnd
157 +  write(*,*) "myMol: ", mpiSim%myMol
158 +  write(*,*) "myNlocal: ", mpiSim%myNlocal
159 +  write(*,*) "myNode: ", mpiSim%myNode
160 +  write(*,*) "numberProcessors: ", mpiSim%numberProcessors
161 +  write(*,*) "rowComm: ", mpiSim%rowComm
162 +  write(*,*) "columnComm: ", mpiSim%columnComm
163 +  write(*,*) "numberRows: ", mpiSim%numberRows
164 +  write(*,*) "numberColumns: ", mpiSim%numberColumns
165 +  write(*,*) "nComponentsRow: ", mpiSim%nComponentsRow
166 +  write(*,*) "nComponentsColumn: ", mpiSim%nComponentsColumn
167 +  write(*,*) "rowIndex: ", mpiSim%rowIndex
168 +  write(*,*) "columnIndex: ", mpiSim%columnIndex
169 +  endif
170
171 <  call make_Force_Grid(thisComponentPlan,localStatus)
171 >
172 >  call make_Force_Grid(mpiSim,localStatus)
173    if (localStatus /= 0) then
174 +  write(default_error,*) "Error creating force grid"
175    status = -1
176    return
177    endif
178
179 <  call updateGridComponents(thisComponentPlan,localStatus)
179 >  call updateGridComponents(mpiSim,localStatus)
180    if (localStatus /= 0) then
181 +  write(default_error,*) "Error updating grid components"
182    status = -1
183    return
184    endif
185
186 +
187    !! initialize gather and scatter plans used in this simulation
188 <  call plan_gather_scatter(1,thisComponentPlan%nComponentsRow,&
189 <  thisComponentPlan,row_comm,plan_row)
190 <  call plan_gather_scatter(nDim,thisComponentPlan%nComponentsRow,&
191 <  thisComponentPlan,row_comm,plan_row3d)
192 <  call plan_gather_scatter(1,thisComponentPlan%nComponentsColumn,&
193 <  thisComponentPlan,col_comm,plan_col)
194 <  call plan_gather_scatter(nDim,thisComponentPlan%nComponentsColumn,&
195 <  thisComponentPlan,col_comm,plan_col3d)
188 >  call plan_gather_scatter(1,mpiSim%nComponentsRow,&
189 >  mpiSim,mpiSim%rowComm,plan_row)
190 >  call plan_gather_scatter(nDim,mpiSim%nComponentsRow,&
191 >  mpiSim,mpiSim%rowComm,plan_row3d)
192 >  call plan_gather_scatter(1,mpiSim%nComponentsColumn,&
193 >  mpiSim,mpiSim%columnComm,plan_col)
194 >  call plan_gather_scatter(nDim,mpiSim%nComponentsColumn,&
195 >  mpiSim,mpiSim%columnComm,plan_col3d)
196
197 <
198 <
197 >  ! Initialize tags
198 >  call setTags(tags,localStatus)
199 >  if (localStatus /= 0) then
200 >  status = -1
201 >  return
202 >  endif
203 >  isSimSet = .true.
204    end subroutine setupSimParallel
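Once setupSimParallel returns, the four plans describe how per-particle data is replicated across the row/column force-decomposition grid. Below is a minimal usage sketch, assuming it sits inside this module (plan_row3d and plan_col3d are private), that dp comes from the definitions module, and that the gather interface has a double-precision 2d specific matching scatter_double_2d; the array names are hypothetical.

    !! Illustrative sketch only; qLocal/qRow/qCol are hypothetical names.
    subroutine exampleGatherPositions(qLocal, qRow, qCol)
      real(kind=dp), dimension(:,:), intent(in)    :: qLocal  ! (nDim, myNlocal)
      real(kind=dp), dimension(:,:), intent(inout) :: qRow    ! (nDim, getNrow(plan_row))
      real(kind=dp), dimension(:,:), intent(inout) :: qCol    ! (nDim, getNcol(plan_col))

      ! replicate local coordinates along the row and column communicators
      call gather(qLocal, qRow, plan_row3d)
      call gather(qLocal, qCol, plan_col3d)
    end subroutine exampleGatherPositions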
205
206    subroutine replanSimParallel(thisComponentPlan,status)
208    !! mpiComponentPlan struct from C
209    type (mpiComponentPlan), intent(inout) :: thisComponentPlan
210    integer, intent(out) :: status
211 <  integer, intnet(out) :: localStatus
212 <
211 >  integer :: localStatus
212 >  integer :: mpierror
213    status = 0
214
215    call updateGridComponents(thisComponentPlan,localStatus)
227
228    !! initialize gather and scatter plans used in this simulation
229    call plan_gather_scatter(1,thisComponentPlan%nComponentsRow,&
230 <  thisComponentPlan,row_comm,plan_row)
230 >  thisComponentPlan,thisComponentPlan%rowComm,plan_row)
231    call plan_gather_scatter(nDim,thisComponentPlan%nComponentsRow,&
232 <  thisComponentPlan,row_comm,plan_row3d)
232 >  thisComponentPlan,thisComponentPlan%rowComm,plan_row3d)
233    call plan_gather_scatter(1,thisComponentPlan%nComponentsColumn,&
234 <  thisComponentPlan,col_comm,plan_col)
234 >  thisComponentPlan,thisComponentPlan%columnComm,plan_col)
235    call plan_gather_scatter(nDim,thisComponentPlan%nComponentsColumn,&
236 <  thisComponentPlan,col_comm,plan_col3d)
236 >  thisComponentPlan,thisComponentPlan%columnComm,plan_col3d)
237
238
239
249    integer, intent(out) :: status
250    integer :: nComponentsLocal
251    integer :: nComponentsRow = 0
252 <  integer :: nComponensColumn = 0
252 >  integer :: nComponentsColumn = 0
253    integer :: mpiErrors
254
255    status = 0
263    nComponentsLocal = thisComponentPlan%myNlocal
264
265    call mpi_allreduce(nComponentsLocal,nComponentsRow,1,mpi_integer,&
266 <  mpi_sum,thisComponentPlan%rowComm,mpiError)
266 >  mpi_sum,thisComponentPlan%rowComm,mpiErrors)
267    if (mpiErrors /= 0) then
268    status = -1
269    return
270    endif
271
272    call mpi_allreduce(nComponentsLocal,nComponentsColumn,1,mpi_integer, &
273 <  mpi_sum,thisComponentPlan%columnComm,mpiError)
273 >  mpi_sum,thisComponentPlan%columnComm,mpiErrors)
274    if (mpiErrors /= 0) then
275    status = -1
276    return
303
304    if (.not. ComponentPlanSet) return
305    status = 0
306 <
306 >
307    !! We make a dangerous assumption here that if numberProcessors is
308    !! zero, then we need to get the information from MPI.
309    if (thisComponentPlan%numberProcessors == 0 ) then
320
321    else
322    nWorldProcessors = thisComponentPlan%numberProcessors
323 <  myWorldRank = thisComponentPlan%myRank
323 >  myWorldRank = thisComponentPlan%myNode
324    endif
325
326
335    nRows = nWorldProcessors/nColumns
336
337    rowIndex = myWorldRank/nColumns
338 <  call mpi_comm_split(mpi_comm_world,rowIndex,0,rowCommunicator,mpiError)
338 >
339 >
340 >
341 >  call mpi_comm_split(mpi_comm_world,rowIndex,0,rowCommunicator,mpiErrors)
342    if ( mpiErrors /= 0 ) then
343 +  write(default_error,*) "MPI comm split failed at row communicator"
344    status = -1
345    return
346    endif
347
348    columnIndex = mod(myWorldRank,nColumns)
349 <  call mpi_comm_split(mpi_comm_world,columnIndex,0,columnCommunicator,mpiError)
349 >  call mpi_comm_split(mpi_comm_world,columnIndex,0,columnCommunicator,mpiErrors)
350    if ( mpiErrors /= 0 ) then
351 +  write(default_error,*) "MPI comm split failed at columnCommunicator"
352    status = -1
353    return
354    endif
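Since the split keys are rowIndex = myWorldRank/nColumns and columnIndex = mod(myWorldRank,nColumns), the grid layout is easy to check by hand; a worked example for six processors on a 2 x 3 grid:

    ! Illustrative arithmetic only (nRows = 2, nColumns = 3):
    !   rank 0 -> rowIndex 0, columnIndex 0     rank 3 -> rowIndex 1, columnIndex 0
    !   rank 1 -> rowIndex 0, columnIndex 1     rank 4 -> rowIndex 1, columnIndex 1
    !   rank 2 -> rowIndex 0, columnIndex 2     rank 5 -> rowIndex 1, columnIndex 2
    ! so mpi_comm_split places ranks {0,1,2} and {3,4,5} in the two row
    ! communicators, and {0,3}, {1,4}, {2,5} in the three column communicators.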
387    !! WARNING this could be dangerous since thisComponentPlan was originally
388    !! allocated in C++ and there is a significant difference between c and
389    !! f95 pointers....
390 <  gsComponentPlan => thisComponetPlan
390 >  this_plan%gsComponentPlan => thisComponentPlan
391
392    ! Set this plan size for displs array.
393    this_plan%gsPlanSize = nDim * nComponents
426    end if
427
428    !! gather all the local sizes into a size # processors array.
429 <  call mpi_allgather(gs_plan%gsPlanSize,1,mpi_integer,this_plan%counts, &
430 <  1,mpi_integer,comm,mpi_err)
429 >  call mpi_allgather(this_plan%gsPlanSize,1,mpi_integer,this_plan%counts, &
430 >  1,mpi_integer,thisComm,mpi_err)
431
432    if (mpi_err /= 0) then
433    if (present(status)) status = -1
453
454
455    this_plan%gsComponentPlan => null()
456 <  call mpi_comm_free(this_plan%comm,mpi_err)
456 >  call mpi_comm_free(this_plan%myPlanComm,mpi_err)
457
458    deallocate(this_plan%counts)
459    deallocate(this_plan%displs)
467    integer, dimension(:), intent(in) :: rbuffer
468    integer :: noffset
469    integer, intent(out), optional :: status
470 +  integer :: i
471
472 +
473    if (present(status)) status = 0
474    noffset = this_plan%displs(this_plan%myPlanRank)
475
492    integer, intent(out), optional :: status
493
494    if (present(status)) status = 0
495 <  noffset = this_plan%displs(this_plan%me)
495 >  noffset = this_plan%displs(this_plan%myPlanRank)
496
497    call mpi_allgatherv(sbuffer,this_plan%gsPlanSize, mpi_double_precision, &
498    rbuffer,this_plan%counts,this_plan%displs,mpi_double_precision, &
564    endif
565
566    end subroutine scatter_double_2d
567 +
568 +
569 +  subroutine setTags(tags,status)
570 +  integer, dimension(:) :: tags
571 +  integer :: status
572 +
573 +  integer :: alloc_stat
574 +
575 +  integer :: ncol
576 +  integer :: nrow
577 +
578 +  status = 0
579 +  ! allocate row arrays
580 +  nrow = getNrow(plan_row)
581 +  ncol = getNcol(plan_col)
582 +
583 +  if (.not. allocated(tagRow)) then
584 +  allocate(tagRow(nrow),STAT=alloc_stat)
585 +  if (alloc_stat /= 0 ) then
586 +  status = -1
587 +  return
588 +  endif
589 +  else
590 +  deallocate(tagRow)
591 +  allocate(tagRow(nrow),STAT=alloc_stat)
592 +  if (alloc_stat /= 0 ) then
593 +  status = -1
594 +  return
595 +  endif
596
597 +  endif
598 +  ! allocate column arrays
599 +  if (.not. allocated(tagColumn)) then
600 +  allocate(tagColumn(ncol),STAT=alloc_stat)
601 +  if (alloc_stat /= 0 ) then
602 +  status = -1
603 +  return
604 +  endif
605 +  else
606 +  deallocate(tagColumn)
607 +  allocate(tagColumn(ncol),STAT=alloc_stat)
608 +  if (alloc_stat /= 0 ) then
609 +  status = -1
610 +  return
611 +  endif
612 +  endif
613 +
614 +  call gather(tags,tagRow,plan_row)
615 +  call gather(tags,tagColumn,plan_col)
616 +
617 +
618 +  end subroutine setTags
619 +
620 +  pure function getNcol(thisplan) result(ncol)
621 +  type (gs_plan), intent(in) :: thisplan
622 +  integer :: ncol
623 +  ncol = thisplan%gsComponentPlan%nComponentsColumn
624 +  end function getNcol
625 +
626 +  pure function getNrow(thisplan) result(nrow)
627 +  type (gs_plan), intent(in) :: thisplan
628 +  integer :: nrow
629 +  nrow = thisplan%gsComponentPlan%nComponentsRow
630 +  end function getNrow
631 +
632 +  function isMPISimSet() result(isthisSimSet)
633 +  logical :: isthisSimSet
634 +  if (isSimSet) then
635 +  isthisSimSet = .true.
636 +  else
637 +  isthisSimSet = .false.
638 +  endif
639 +  end function isMPISimSet
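The new accessors let callers size row/column scratch storage without touching the component plan directly, mirroring the pattern in setTags above. A minimal sketch, assuming it is contained in this module (plan_row and plan_col are private) and that dp comes from the definitions module; fRow and fCol are hypothetical names:

    !! Illustrative sketch only.
    subroutine exampleAllocateScratch(status)
      integer, intent(out) :: status
      real(kind=dp), dimension(:,:), allocatable :: fRow, fCol
      integer :: alloc_stat

      status = 0
      if (.not. isMPISimSet()) then
        status = -1
        return
      endif
      allocate(fRow(3,getNrow(plan_row)), fCol(3,getNcol(plan_col)), &
           STAT=alloc_stat)
      if (alloc_stat /= 0) status = -1
    end subroutine exampleAllocateScratch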
640
641 <  #endif
641 >
642 >
643 >
644    end module mpiSimulation
645