root/group/trunk/mdtools/mpi_implementation/mpiSimulation_module.F90
Revision: 254
Committed: Thu Jan 30 20:03:37 2003 UTC (22 years, 3 months ago) by chuckv
File size: 18320 byte(s)
Log Message:
Bug fixes for mpi version of code

File Contents

# User Rev Content
1 chuckv 215 !! MPI support for long range forces using force decomposition
2     !! on a square grid of processors.
3     !! Corresponds to mpiSimulation.cpp for C++
4     !! This module also contains a private interface for MPI F90 routines.
5     !!
6     !! @author Charles F. Vardeman II
7     !! @author Matthew Meineke
8 chuckv 254 !! @version $Id: mpiSimulation_module.F90,v 1.5 2003-01-30 20:03:37 chuckv Exp $, $Date: 2003-01-30 20:03:37 $, $Name: not supported by cvs2svn $, $Revision: 1.5 $
9 chuckv 215
10    
11    
12    
13     module mpiSimulation
14 chuckv 254 use definitions
15 chuckv 215 use mpi
16     implicit none
17     PRIVATE
18    
19    
20     !! PUBLIC Subroutines contained in this module
21     !! gather and scatter are a generic interface
22     !! to gather and scatter routines
23     public :: gather, scatter
24     public :: setupSimParallel
25     public :: replanSimParallel
26 chuckv 230 public :: getNcol
27     public :: getNrow
28 chuckv 239 public :: isMPISimSet
29 chuckv 230
30    
31 chuckv 215 !! PUBLIC Subroutines contained in MPI module
32     public :: mpi_bcast
33     public :: mpi_allreduce
34     public :: mpi_reduce
35     public :: mpi_send
36     public :: mpi_recv
37     public :: mpi_get_processor_name
38     public :: mpi_finalize
39    
40     !! PUBLIC mpi variables
41     public :: mpi_comm_world
42     public :: mpi_character
43     public :: mpi_integer
44     public :: mpi_double_precision
45     public :: mpi_sum
46     public :: mpi_max
47     public :: mpi_status_size
48     public :: mpi_any_source
49    
50     !! Safety logical to prevent access to ComponentPlan until
51     !! it is set by C++.
52     logical :: ComponentPlanSet = .false.
53    
54    
55     !! generic mpi error declaration.
56     integer,public :: mpi_err
57    
58 chuckv 239
59    
60 chuckv 215 !! Include mpiComponentPlan type. mpiComponentPlan is a
61     !! dual header file for both C and Fortran.
62     #define __FORTRAN90
63     #include "mpiComponentPlan.h"
64    
65 chuckv 239
66    
67     !! Tags used during force loop for parallel simulation
68     integer, allocatable, dimension(:) :: tagLocal
69     integer, public, allocatable, dimension(:) :: tagRow
70     integer, public, allocatable, dimension(:) :: tagColumn
71    
72     !! Logical set true if mpiSimulation has been initialized
73 chuckv 253 logical :: isSimSet = .false.
74 chuckv 239
75 chuckv 215 !! gs_plan contains plans for gather and scatter routines
76     type, public :: gs_plan
77     private
78     type (mpiComponentPlan), pointer :: gsComponentPlan => NULL()
79     integer :: gsPlanSize !! size of this plan (nDim*nComponents)
80     integer :: globalPlanSize !! size of all components in plan
81     integer, dimension(:), pointer :: displs !! Displacements array for mpi indexed from 0.
82     integer, dimension(:), pointer :: counts !! Counts array for mpi indexed from 0.
83     integer :: myPlanComm !! My communicator for this plan
84     integer :: myPlanRank !! My rank in this plan
85     integer :: planNprocs !! Number of processors in this plan
86     end type gs_plan
87    
88     ! plans for different decompositions
89     type (gs_plan), public :: plan_row
90     type (gs_plan), public :: plan_row3d
91     type (gs_plan), public :: plan_col
92     type (gs_plan), public :: plan_col3d
93    
94     type (mpiComponentPlan), pointer :: simComponentPlan
95    
96     ! interface for gather scatter routines
97     !! Generic interface for gather.
98     !! Gathers a local array into a row or column array.
99     !! Interfaces are provided for integer, double, and
100     !! rank-2 double arrays.
101     interface gather
102     module procedure gather_integer
103     module procedure gather_double
104     module procedure gather_double_2d
105     end interface
106    
107     !! Generic interface for scatter.
108     !! Scatters a row or column array, adding components
109     !! and reducing them to a local nComponent array.
110     !! Interface provided for double and double rank=2 arrays.
111     interface scatter
112     module procedure scatter_double
113     module procedure scatter_double_2d
114     end interface
115    
116    
117 chuckv 239
118 chuckv 215 contains
119    
120     !! Sets up mpiComponentPlan with structure passed from C++.
121 chuckv 239 subroutine setupSimParallel(thisComponentPlan,ntags,tags,status)
122 chuckv 215 ! Passed Arguments
123     ! integer, intent(inout) :: nDim !! Number of dimensions
124     !! mpiComponentPlan struct from C
125 chuckv 239 type (mpiComponentPlan), intent(inout) :: thisComponentPlan
126     !! Number of tags passed, nlocal
127     integer, intent(in) :: ntags
128     !! Result status, 0 = normal, -1 = error
129 chuckv 215 integer, intent(out) :: status
130 chuckv 239 integer :: localStatus
131     !! Global reference tag for local particles
132     integer, dimension(ntags),intent(inout) :: tags
133 chuckv 215
134 chuckv 254
135 chuckv 215 status = 0
136     if (componentPlanSet) then
137     return
138     endif
139     componentPlanSet = .true.
140 chuckv 254
141    
142 chuckv 215 call make_Force_Grid(thisComponentPlan,localStatus)
143     if (localStatus /= 0) then
144 chuckv 254 write(default_error,*) "Error creating force grid"
145 chuckv 215 status = -1
146     return
147     endif
148    
149     call updateGridComponents(thisComponentPlan,localStatus)
150     if (localStatus /= 0) then
151 chuckv 254 write(default_error,*) "Error updating grid components"
152 chuckv 215 status = -1
153     return
154     endif
155    
156 chuckv 254
157 chuckv 215 !! initialize gather and scatter plans used in this simulation
158     call plan_gather_scatter(1,thisComponentPlan%nComponentsRow,&
159 chuckv 254 thisComponentPlan,thisComponentPlan%rowComm,plan_row)
160 chuckv 215 call plan_gather_scatter(nDim,thisComponentPlan%nComponentsRow,&
161 chuckv 254 thisComponentPlan,thisComponentPlan%rowComm,plan_row3d)
162 chuckv 215 call plan_gather_scatter(1,thisComponentPlan%nComponentsColumn,&
163 chuckv 254 thisComponentPlan,thisComponentPlan%columnComm,plan_col)
164 chuckv 215 call plan_gather_scatter(nDim,thisComponentPlan%nComponentsColumn,&
165 chuckv 254 thisComponentPlan,thisComponentPlan%columnComm,plan_col3d)
166 chuckv 215
167 chuckv 239 ! Initialize tags
168     call setTags(tags,localStatus)
169     if (localStatus /= 0) then
170     status = -1
171     return
172     endif
173     isSimSet = .true.
174 chuckv 215 end subroutine setupSimParallel
175    
176     subroutine replanSimParallel(thisComponentPlan,status)
177     ! Passed Arguments
178     !! mpiComponentPlan struct from C
179     type (mpiComponentPlan), intent(inout) :: thisComponentPlan
180     integer, intent(out) :: status
181 chuckv 254 integer :: localStatus
182     integer :: mpierror
183 chuckv 215 status = 0
184    
185     call updateGridComponents(thisComponentPlan,localStatus)
186     if (localStatus /= 0) then
187     status = -1
188     return
189     endif
190    
191     !! Unplan Gather Scatter plans
192     call unplan_gather_scatter(plan_row)
193     call unplan_gather_scatter(plan_row3d)
194     call unplan_gather_scatter(plan_col)
195     call unplan_gather_scatter(plan_col3d)
196    
197    
198     !! initialize gather and scatter plans used in this simulation
199     call plan_gather_scatter(1,thisComponentPlan%nComponentsRow,&
200 chuckv 254 thisComponentPlan,thisComponentPlan%rowComm,plan_row)
201 chuckv 215 call plan_gather_scatter(nDim,thisComponentPlan%nComponentsRow,&
202 chuckv 254 thisComponentPlan,thisComponentPlan%rowComm,plan_row3d)
203 chuckv 215 call plan_gather_scatter(1,thisComponentPlan%nComponentsColumn,&
204 chuckv 254 thisComponentPlan,thisComponentPlan%columnComm,plan_col)
205 chuckv 215 call plan_gather_scatter(nDim,thisComponentPlan%nComponentsColumn,&
206 chuckv 254 thisComponentPlan,thisComponentPlan%columnComm,plan_col3d)
207 chuckv 215
208    
209    
210     end subroutine replanSimParallel
211    
212     !! Updates number of row and column components for long range forces.
213     subroutine updateGridComponents(thisComponentPlan,status)
214     type (mpiComponentPlan) :: thisComponentPlan !! mpiComponentPlan
215    
216     !! Status return
217     !! - 0 Success
218     !! - -1 Failure
219     integer, intent(out) :: status
220     integer :: nComponentsLocal
221     integer :: nComponentsRow = 0
222 chuckv 254 integer :: nComponentsColumn = 0
223 chuckv 215 integer :: mpiErrors
224    
225     status = 0
226     if (.not. componentPlanSet) return
227    
228     if (thisComponentPlan%myNlocal == 0 ) then
229     status = -1
230     return
231     endif
232    
233     nComponentsLocal = thisComponentPlan%myNlocal
234    
235     call mpi_allreduce(nComponentsLocal,nComponentsRow,1,mpi_integer,&
236 chuckv 254 mpi_sum,thisComponentPlan%rowComm,mpiErrors)
237 chuckv 215 if (mpiErrors /= 0) then
238     status = -1
239     return
240     endif
241    
242     call mpi_allreduce(nComponentsLocal,nComponentsColumn,1,mpi_integer, &
243 chuckv 254 mpi_sum,thisComponentPlan%columnComm,mpiErrors)
244 chuckv 215 if (mpiErrors /= 0) then
245     status = -1
246     return
247     endif
248    
249     thisComponentPlan%nComponentsRow = nComponentsRow
250     thisComponentPlan%nComponentsColumn = nComponentsColumn
251    
252    
253     end subroutine updateGridComponents
254    
255    
256     !! Creates a square force decomposition of processors into row and column
257     !! communicators.
258     subroutine make_Force_Grid(thisComponentPlan,status)
259     type (mpiComponentPlan) :: thisComponentPlan
260     integer, intent(out) :: status !! status returns -1 if error
261     integer :: nColumnsMax !! Maximum number of columns
262     integer :: nWorldProcessors !! Total number of processors in World comm.
263     integer :: rowIndex !! Row for this processor.
264     integer :: columnIndex !! Column for this processor.
265     integer :: nRows !! Total number of rows.
266     integer :: nColumns !! Total number of columns.
267     integer :: mpiErrors !! MPI errors.
268     integer :: rowCommunicator !! MPI row communicator.
269     integer :: columnCommunicator
270     integer :: myWorldRank
271     integer :: i
272    
273    
274     if (.not. ComponentPlanSet) return
275     status = 0
276 chuckv 254
277 chuckv 215 !! We make a dangerous assumption here that if numberProcessors is
278     !! zero, then we need to get the information from MPI.
279     if (thisComponentPlan%numberProcessors == 0 ) then
280     call mpi_comm_size( MPI_COMM_WORLD, nWorldProcessors,mpiErrors)
281     if ( mpiErrors /= 0 ) then
282     status = -1
283     return
284     endif
285     call mpi_comm_rank( MPI_COMM_WORLD,myWorldRank,mpiErrors)
286     if ( mpiErrors /= 0 ) then
287     status = -1
288     return
289     endif
290    
291     else
292     nWorldProcessors = thisComponentPlan%numberProcessors
293 chuckv 253 myWorldRank = thisComponentPlan%myNode
294 chuckv 215 endif
295    
296    
297    
298    
299     nColumnsMax = nint(sqrt(real(nWorldProcessors,kind=dp)))
300    
301     do i = 1, nColumnsMax
302     if (mod(nWorldProcessors,i) == 0) nColumns = i
303     end do
304    
305     nRows = nWorldProcessors/nColumns
306    
307     rowIndex = myWorldRank/nColumns
308 chuckv 254
309    
310    
311     call mpi_comm_split(mpi_comm_world,rowIndex,0,rowCommunicator,mpiErrors)
312 chuckv 215 if ( mpiErrors /= 0 ) then
313 chuckv 254 write(default_error,*) "MPI comm split failed at row communicator"
314 chuckv 215 status = -1
315     return
316     endif
317    
318     columnIndex = mod(myWorldRank,nColumns)
319 chuckv 254 call mpi_comm_split(mpi_comm_world,columnIndex,0,columnCommunicator,mpiErrors)
320 chuckv 215 if ( mpiErrors /= 0 ) then
321 chuckv 254 write(default_error,*) "MPI comm split failed at column communicator"
322 chuckv 215 status = -1
323     return
324     endif
325    
326     ! Set appropriate components of thisComponentPlan
327     thisComponentPlan%rowComm = rowCommunicator
328     thisComponentPlan%columnComm = columnCommunicator
329     thisComponentPlan%rowIndex = rowIndex
330     thisComponentPlan%columnIndex = columnIndex
331     thisComponentPlan%numberRows = nRows
332     thisComponentPlan%numberColumns = nColumns
333    
334    
335     end subroutine make_Force_Grid
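          !! [Editor's note, not part of the archived revision] A worked example of the
          !! factoring above: with nWorldProcessors = 6, nColumnsMax = nint(sqrt(6.0)) = 2,
          !! the divisor loop leaves nColumns = 2, so nRows = 6/2 = 3. World rank 4 then
          !! gets rowIndex = 4/2 = 2 and columnIndex = mod(4,2) = 0.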
336    
337    
338     !! Initializes a gather/scatter plan.
339     subroutine plan_gather_scatter( nDim,nComponents,thisComponentPlan, &
340     thisComm, this_plan,status)
341     integer, intent(in) :: nDim !! Number of dimensions for gather scatter plan
342     integer, intent(in) :: nComponents
343     type (mpiComponentPlan), intent(in), target :: thisComponentPlan
344     type (gs_plan), intent(out) :: this_plan !! gather/scatter plan to initialize
345     integer, intent(in) :: thisComm !! MPI communicator for this plan
346    
347     integer :: arraySize !! size to allocate plan for
348     integer, intent(out), optional :: status
349     integer :: ierror
350     integer :: i,junk
351    
352     if (present(status)) status = 0
353    
354    
355     !! Set the gsComponentPlan pointer
356     !! to the component plan we want to use for this gather/scatter plan.
357     !! WARNING: this could be dangerous since thisComponentPlan was originally
358     !! allocated in C++ and there is a significant difference between C and
359     !! F95 pointers....
360 chuckv 254 this_plan%gsComponentPlan => thisComponentPlan
361 chuckv 215
362     ! Set this plan size for displs array.
363     this_plan%gsPlanSize = nDim * nComponents
364    
365     ! Duplicate communicator for this plan
366     call mpi_comm_dup(thisComm,this_plan%myPlanComm,mpi_err)
367     if (mpi_err /= 0) then
368     if (present(status)) status = -1
369     return
370     end if
371     call mpi_comm_rank(this_plan%myPlanComm,this_plan%myPlanRank,mpi_err)
372     if (mpi_err /= 0) then
373     if (present(status)) status = -1
374     return
375     end if
376    
377     call mpi_comm_size(this_plan%myPlanComm,this_plan%planNprocs,mpi_err)
378    
379     if (mpi_err /= 0) then
380     if (present(status)) status = -1
381     return
382     end if
383    
384     !! counts and displacements arrays are indexed from 0 to be compatible
385     !! with MPI arrays.
386     allocate (this_plan%counts(0:this_plan%planNprocs-1),STAT=ierror)
387     if (ierror /= 0) then
388     if (present(status)) status = -1
389     return
390     end if
391    
392     allocate (this_plan%displs(0:this_plan%planNprocs-1),STAT=ierror)
393     if (ierror /= 0) then
394     if (present(status)) status = -1
395     return
396     end if
397    
398     !! gather all the local plan sizes into an array of length planNprocs.
399 chuckv 253 call mpi_allgather(this_plan%gsPlanSize,1,mpi_integer,this_plan%counts, &
400 chuckv 254 1,mpi_integer,thisComm,mpi_err)
401 chuckv 215
402     if (mpi_err /= 0) then
403     if (present(status)) status = -1
404     return
405     end if
406    
407    
408     !! figure out the total number of particles in this plan
409     this_plan%globalPlanSize = sum(this_plan%counts)
410    
411    
412     !! initialize plan displacements.
413     this_plan%displs(0) = 0
414     do i = 1, this_plan%planNprocs - 1,1
415     this_plan%displs(i) = this_plan%displs(i-1) + this_plan%counts(i-1)
416     end do
417    
418     end subroutine plan_gather_scatter
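          !! [Editor's note, not part of the archived revision] The displacements computed
          !! above are a prefix sum of the gathered counts. For example, if the three ranks
          !! of a plan report gsPlanSize values 4, 7, and 5, then counts = (/ 4, 7, 5 /),
          !! displs = (/ 0, 4, 11 /), and globalPlanSize = 16.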
419    
420    
421     subroutine unplan_gather_scatter(this_plan)
422     type (gs_plan), intent(inout) :: this_plan
423    
424    
425     this_plan%gsComponentPlan => null()
426 chuckv 253 call mpi_comm_free(this_plan%myPlanComm,mpi_err)
427 chuckv 215
428     deallocate(this_plan%counts)
429     deallocate(this_plan%displs)
430    
431     end subroutine unplan_gather_scatter
432    
433     subroutine gather_integer( sbuffer, rbuffer, this_plan, status)
434    
435     type (gs_plan), intent(in) :: this_plan
436     integer, dimension(:), intent(in) :: sbuffer
437     integer, dimension(:), intent(inout) :: rbuffer
438     integer :: noffset
439     integer, intent(out), optional :: status
440    
441     if (present(status)) status = 0
442     noffset = this_plan%displs(this_plan%myPlanRank)
443    
444     call mpi_allgatherv(sbuffer,this_plan%gsPlanSize, mpi_integer, &
445     rbuffer,this_plan%counts,this_plan%displs,mpi_integer, &
446     this_plan%myPlanComm, mpi_err)
447    
448     if (mpi_err /= 0) then
449     if (present(status)) status = -1
450     endif
451    
452     end subroutine gather_integer
453    
454     subroutine gather_double( sbuffer, rbuffer, this_plan, status)
455    
456     type (gs_plan), intent(in) :: this_plan
457     real( kind = DP ), dimension(:), intent(in) :: sbuffer
458     real( kind = DP ), dimension(:), intent(inout) :: rbuffer
459     integer :: noffset
460     integer, intent(out), optional :: status
461    
462     if (present(status)) status = 0
463 chuckv 253 noffset = this_plan%displs(this_plan%myPlanRank)
464 chuckv 215
465     call mpi_allgatherv(sbuffer,this_plan%gsPlanSize, mpi_double_precision, &
466     rbuffer,this_plan%counts,this_plan%displs,mpi_double_precision, &
467     this_plan%myPlanComm, mpi_err)
468    
469     if (mpi_err /= 0) then
470     if (present(status)) status = -1
471     endif
472    
473     end subroutine gather_double
474    
475     subroutine gather_double_2d( sbuffer, rbuffer, this_plan, status)
476    
477     type (gs_plan), intent(in) :: this_plan
478     real( kind = DP ), dimension(:,:), intent(in) :: sbuffer
479     real( kind = DP ), dimension(:,:), intent(inout) :: rbuffer
480     integer :: noffset,i,ierror
481     integer, intent(out), optional :: status
482    
483     external mpi_allgatherv
484    
485     if (present(status)) status = 0
486    
487     ! noffset = this_plan%displs(this_plan%me)
488    
489     call mpi_allgatherv(sbuffer,this_plan%gsPlanSize, mpi_double_precision, &
490     rbuffer,this_plan%counts,this_plan%displs,mpi_double_precision, &
491     this_plan%myPlanComm, mpi_err)
492    
493     if (mpi_err /= 0) then
494     if (present(status)) status = -1
495     endif
496    
497     end subroutine gather_double_2d
498    
499     subroutine scatter_double( sbuffer, rbuffer, this_plan, status)
500    
501     type (gs_plan), intent(in) :: this_plan
502     real( kind = DP ), dimension(:), intent(in) :: sbuffer
503     real( kind = DP ), dimension(:), intent(inout) :: rbuffer
504     integer, intent(out), optional :: status
505     external mpi_reduce_scatter
506    
507     if (present(status)) status = 0
508    
509     call mpi_reduce_scatter(sbuffer,rbuffer, this_plan%counts, &
510     mpi_double_precision, MPI_SUM, this_plan%myPlanComm, mpi_err)
511    
512     if (mpi_err /= 0) then
513     if (present(status)) status = -1
514     endif
515    
516     end subroutine scatter_double
517    
518     subroutine scatter_double_2d( sbuffer, rbuffer, this_plan, status)
519    
520     type (gs_plan), intent(in) :: this_plan
521     real( kind = DP ), dimension(:,:), intent(in) :: sbuffer
522     real( kind = DP ), dimension(:,:), intent(inout) :: rbuffer
523     integer, intent(out), optional :: status
524     external mpi_reduce_scatter
525    
526     if (present(status)) status = 0
527     call mpi_reduce_scatter(sbuffer,rbuffer, this_plan%counts, &
528     mpi_double_precision, MPI_SUM, this_plan%myPlanComm, mpi_err)
529    
530     if (mpi_err /= 0) then
531     if (present(status)) status = -1
532     endif
533    
534     end subroutine scatter_double_2d
535    
536 chuckv 230
537 chuckv 239 subroutine setTags(tags,status)
538 chuckv 253 integer, dimension(:) :: tags
539 chuckv 239 integer :: status
540    
541     integer :: alloc_stat
542    
543 chuckv 253 integer :: ncol
544     integer :: nrow
545    
546 chuckv 239 status = 0
547     ! allocate row arrays
548 chuckv 253 nrow = getNrow(plan_row)
549     ncol = getNcol(plan_col)
550    
551 chuckv 239 if (.not. allocated(tagRow)) then
552 chuckv 253 allocate(tagRow(nrow),STAT=alloc_stat)
553 chuckv 239 if (alloc_stat /= 0 ) then
554     status = -1
555     return
556     endif
557     else
558     deallocate(tagRow)
559 chuckv 253 allocate(tagRow(nrow),STAT=alloc_stat)
560 chuckv 239 if (alloc_stat /= 0 ) then
561     status = -1
562     return
563     endif
564    
565     endif
566     ! allocate column arrays
567 chuckv 254 if (.not. allocated(tagColumn)) then
568 chuckv 253 allocate(tagColumn(ncol),STAT=alloc_stat)
569 chuckv 239 if (alloc_stat /= 0 ) then
570     status = -1
571     return
572     endif
573     else
574 chuckv 253 deallocate(tagColumn)
575     allocate(tagColumn(ncol),STAT=alloc_stat)
576 chuckv 239 if (alloc_stat /= 0 ) then
577     status = -1
578     return
579     endif
580     endif
581    
582     call gather(tags,tagRow,plan_row)
583 chuckv 253 call gather(tags,tagColumn,plan_col)
584 chuckv 239
585    
586     end subroutine setTags
587    
588 chuckv 253 pure function getNcol(thisplan) result(ncol)
589     type (gs_plan), intent(in) :: thisplan
590 chuckv 230 integer :: ncol
591 chuckv 253 ncol = thisplan%gsComponentPlan%nComponentsColumn
592 chuckv 230 end function getNcol
593    
594 chuckv 253 pure function getNrow(thisplan) result(nrow)
595     type (gs_plan), intent(in) :: thisplan
596 chuckv 230 integer :: nrow
597     nrow = thisplan%gsComponentPlan%nComponentsRow
598     end function getNrow
599    
600 chuckv 253 function isMPISimSet() result(isthisSimSet)
601 chuckv 239 logical :: isthisSimSet
602     if (isSimSet) then
603     isthisSimSet = .true.
604     else
605     isthisSimSet = .false.
606     endif
607     end function isMPISimSet
608 chuckv 215
609 chuckv 230
610    
611 chuckv 254
612 chuckv 215 end module mpiSimulation
613
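
A minimal caller-side sketch of the intended gather/scatter pattern (an editorial illustration, not part of the archived file): it assumes setupSimParallel has already been driven from the C++ side (mpiSimulation.cpp) with a filled-in mpiComponentPlan, that the kind parameter dp comes from the definitions module, and that do_row_forces_sketch, q, qRow, fRow, and fLocal are hypothetical names.

    !! Hypothetical caller; all names below are illustrative.
    subroutine do_row_forces_sketch(nlocal, q, fLocal)
      use definitions
      use mpiSimulation
      implicit none
      integer, intent(in) :: nlocal
      real(kind=dp), dimension(3,nlocal), intent(in)  :: q      !! local positions
      real(kind=dp), dimension(3,nlocal), intent(out) :: fLocal !! local forces
      real(kind=dp), allocatable, dimension(:,:) :: qRow, fRow
      integer :: status

      !! Row-sized work arrays; getNrow reports the number of row components.
      allocate(qRow(3, getNrow(plan_row)), fRow(3, getNrow(plan_row)))
      fRow = 0.0_dp

      !! Replicate local positions along this processor's row.
      call gather(q, qRow, plan_row3d, status)

      !! ... accumulate pairwise row contributions into fRow here ...

      !! Reduce the row contributions back onto the nlocal local atoms.
      call scatter(fRow, fLocal, plan_row3d, status)

      deallocate(qRow, fRow)
    end subroutine do_row_forces_sketch

The column plans (plan_col and plan_col3d) follow the same pattern over the column communicator.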