root/group/trunk/mdtools/mpi_implementation/mpiSimulation_module.F90
Revision: 253
Committed: Thu Jan 30 15:20:21 2003 UTC by chuckv
File size: 17859 bytes
Log Message:
Added a generic util code directory and moved Linux_ifc_machdep to it.
MPI changes to compile MPI modules.

File Contents

1 !! MPI support for long range forces using force decomposition
2 !! on a square grid of processors.
3 !! Corresponds to mpiSimulation.cpp for C++.
4 !! This module also contains a private interface for MPI F90 routines.
5 !!
6 !! @author Charles F. Vardeman II
7 !! @author Matthew Meineke
8 !! @version $Id: mpiSimulation_module.F90,v 1.4 2003-01-30 15:20:21 chuckv Exp $, $Date: 2003-01-30 15:20:21 $, $Name: not supported by cvs2svn $, $Revision: 1.4 $
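!!
!! Overview of the expected call sequence (a sketch inferred from the
!! routines below, not a documented API contract):
!!   1. C++ fills an mpiComponentPlan and calls setupSimParallel() once;
!!      this builds the row/column force grid, the gather/scatter plans,
!!      and the row/column tag arrays.
!!   2. During the force loop, gather() spreads local data into row or
!!      column arrays and scatter() sum-reduces row/column contributions
!!      back into local arrays using those plans.
!!   3. replanSimParallel() rebuilds the plans after the number of local
!!      components changes.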
9
10
11
12
13 module mpiSimulation
14 #ifdef IS_MPI
15 use mpi
16 implicit none
17 PRIVATE
18
19
20 !! PUBLIC Subroutines contained in this module
21 !! gather and scatter are a generic interface
22 !! to gather and scatter routines
23 public :: gather, scatter
24 public :: setupSimParallel
25 public :: replanSimParallel
26 public :: getNcol
27 public :: getNrow
28 public :: isMPISimSet
29
30
31 !! PUBLIC Subroutines contained in MPI module
32 public :: mpi_bcast
33 public :: mpi_allreduce
34 public :: mpi_reduce
35 public :: mpi_send
36 public :: mpi_recv
37 public :: mpi_get_processor_name
38 public :: mpi_finalize
39
40 !! PUBLIC mpi variables
41 public :: mpi_comm_world
42 public :: mpi_character
43 public :: mpi_integer
44 public :: mpi_double_precision
45 public :: mpi_sum
46 public :: mpi_max
47 public :: mpi_status_size
48 public :: mpi_any_source
49
50 !! Safety logical to prevent access to ComponentPlan until
51 !! it is set by C++.
52 logical :: ComponentPlanSet = .false.
53
54
55 !! generic mpi error declaration.
56 integer,public :: mpi_err
57
58
59
60 !! Include the mpiComponentPlan type. mpiComponentPlan.h is a
61 !! dual header file for both C and Fortran.
62 #define __FORTRAN90
63 #include "mpiComponentPlan.h"
64
65
66
67 !! Tags used during force loop for parallel simulation
68 integer, allocatable, dimension(:) :: tagLocal
69 integer, public, allocatable, dimension(:) :: tagRow
70 integer, public, allocatable, dimension(:) :: tagColumn
71
72 !! Logical set true if mpiSimulation has been initialized
73 logical :: isSimSet = .false.
74
75 !! gs_plan contains plans for gather and scatter routines
76 type, public :: gs_plan
77 private
78 type (mpiComponentPlan), pointer :: gsComponentPlan => NULL()
79 integer :: gsPlanSize !! size of this plan (nDim*nComponents)
80 integer :: globalPlanSize !! size of all components in plan
81 integer, dimension(:), pointer :: displs !! Displacements array for mpi indexed from 0.
82 integer, dimension(:), pointer :: counts !! Counts array for mpi indexed from 0.
83 integer :: myPlanComm !! My communicator for this plan
84 integer :: myPlanRank !! My rank in this plan
85 integer :: planNprocs !! Number of processors in this plan
86 end type gs_plan
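!! A gs_plan records everything mpi_allgatherv/mpi_reduce_scatter need for
!! one decomposition: the duplicated communicator, plus each rank's block
!! size (counts) and starting offset (displs) within the gathered array.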
87
88 ! plans for different decompositions
89 type (gs_plan), public :: plan_row
90 type (gs_plan), public :: plan_row3d
91 type (gs_plan), public :: plan_col
92 type (gs_plan), public :: plan_col3d
93
94 type (mpiComponentPlan), pointer :: simComponentPlan
95
96 ! interface for gather scatter routines
97 !! Generic interface for gather.
98 !! Gathers a local array into a row or column array.
99 !! Interface provided for integer, double, and rank-2
100 !! double arrays.
101 interface gather
102 module procedure gather_integer
103 module procedure gather_double
104 module procedure gather_double_2d
105 end interface
106
107 !! Generic interface for scatter.
108 !! Scatters a row or column array, adding components
109 !! and reducing them to a local nComponent array.
110 !! Interface provided for double and double rank=2 arrays.
111 interface scatter
112 module procedure scatter_double
113 module procedure scatter_double_2d
114 end interface
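!! Together, gather and scatter implement the force-decomposition
!! communication pattern: gather replicates each processor's local data
!! along its row or column, and scatter sum-reduces the row/column
!! contributions back down to the local arrays.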
115
116
117
118 contains
119
120 !! Sets up mpiComponentPlan with structure passed from C++.
121 subroutine setupSimParallel(thisComponentPlan,ntags,tags,status)
122 ! Passed Arguments
123 ! integer, intent(inout) :: nDim !! Number of dimensions
124 !! mpiComponentPlan struct from C
125 type (mpiComponentPlan), intent(inout) :: thisComponentPlan
126 !! Number of tags passed, nlocal
127 integer, intent(in) :: ntags
128 !! Result status, 0 = normal, -1 = error
129 integer, intent(out) :: status
130 integer :: localStatus
131 !! Global reference tag for local particles
132 integer, dimension(ntags),intent(inout) :: tags
133
134 status = 0
135 if (componentPlanSet) then
136 return
137 endif
138
139 componentPlanSet = .true.
140
141 call make_Force_Grid(thisComponentPlan,localStatus)
142 if (localStatus /= 0) then
143 status = -1
144 return
145 endif
146
147 call updateGridComponents(thisComponentPlan,localStatus)
148 if (localStatus /= 0) then
149 status = -1
150 return
151 endif
152
153 !! initialize gather and scatter plans used in this simulation
154 call plan_gather_scatter(1,thisComponentPlan%nComponentsRow,&
155 thisComponentPlan,thisComponentPlan%rowComm,plan_row)
156 call plan_gather_scatter(nDim,thisComponentPlan%nComponentsRow,&
157 thisComponentPlan,thisComponentPlan%rowComm,plan_row3d)
158 call plan_gather_scatter(1,thisComponentPlan%nComponentsColumn,&
159 thisComponentPlan,thisComponentPlan%columnComm,plan_col)
160 call plan_gather_scatter(nDim,thisComponentPlan%nComponentsColumn,&
161 thisComponentPlan,thisComponentPlan%columnComm,plan_col3d)
162
163 ! Initialize tags
164 call setTags(tags,localStatus)
165 if (localStatus /= 0) then
166 status = -1
167 return
168 endif
169 isSimSet = .true.
170 end subroutine setupSimParallel
171
172 subroutine replanSimParallel(thisComponentPlan,status)
173 ! Passed Arguments
174 !! mpiComponentPlan struct from C
175 type (mpiComponentPlan), intent(inout) :: thisComponentPlan
176 integer, intent(out) :: status
177 integer :: localStatus
178
179 status = 0
180
181 call updateGridComponents(thisComponentPlan,localStatus)
182 if (localStatus /= 0) then
183 status = -1
184 return
185 endif
186
187 !! Unplan Gather Scatter plans
188 call unplan_gather_scatter(plan_row)
189 call unplan_gather_scatter(plan_row3d)
190 call unplan_gather_scatter(plan_col)
191 call unplan_gather_scatter(plan_col3d)
192
193
194 !! initialize gather and scatter plans used in this simulation
195 call plan_gather_scatter(1,thisComponentPlan%nComponentsRow,&
196 thisComponentPlan,thisComponentPlan%rowComm,plan_row)
197 call plan_gather_scatter(nDim,thisComponentPlan%nComponentsRow,&
198 thisComponentPlan,thisComponentPlan%rowComm,plan_row3d)
199 call plan_gather_scatter(1,thisComponentPlan%nComponentsColumn,&
200 thisComponentPlan,thisComponentPlan%columnComm,plan_col)
201 call plan_gather_scatter(nDim,thisComponentPlan%nComponentsColumn,&
202 thisComponentPlan,thisComponentPlan%columnComm,plan_col3d)
203
204
205
206 end subroutine replanSimParallel
207
208 !! Updates number of row and column components for long range forces.
209 subroutine updateGridComponents(thisComponentPlan,status)
210 type (mpiComponentPlan) :: thisComponentPlan !! mpiComponentPlan
211
212 !! Status return
213 !! - 0 Success
214 !! - -1 Failure
215 integer, intent(out) :: status
216 integer :: nComponentsLocal
217 integer :: nComponentsRow = 0
218 integer :: nComponentsColumn = 0
219 integer :: mpiErrors
220
221 status = 0
222 if (.not. componentPlanSet) return
223
224 if (thisComponentPlan%myNlocal == 0 ) then
225 status = -1
226 return
227 endif
228
229 nComponentsLocal = thisComponentPlan%myNlocal
230
231 call mpi_allreduce(nComponentsLocal,nComponentsRow,1,mpi_integer,&
232 mpi_sum,thisComponentPlan%rowComm,mpiErrors)
233 if (mpiErrors /= 0) then
234 status = -1
235 return
236 endif
237
238 call mpi_allreduce(nComponentsLocal,nComponentsColumn,1,mpi_integer, &
239 mpi_sum,thisComponentPlan%columnComm,mpiErrors)
240 if (mpiErrors /= 0) then
241 status = -1
242 return
243 endif
244
245 thisComponentPlan%nComponentsRow = nComponentsRow
246 thisComponentPlan%nComponentsColumn = nComponentsColumn
247
248
249 end subroutine updateGridComponents
250
251
252 !! Creates a square force decomposition of processors into row and column
253 !! communicators.
254 subroutine make_Force_Grid(thisComponentPlan,status)
255 type (mpiComponentPlan) :: thisComponentPlan
256 integer, intent(out) :: status !! status returns -1 if error
257 integer :: nColumnsMax !! Maximum number of columns
258 integer :: nWorldProcessors !! Total number of processors in World comm.
259 integer :: rowIndex !! Row for this processor.
260 integer :: columnIndex !! Column for this processor.
261 integer :: nRows !! Total number of rows.
262 integer :: nColumns !! Total number of columns.
263 integer :: mpiErrors !! MPI errors.
264 integer :: rowCommunicator !! MPI row communicator.
265 integer :: columnCommunicator
266 integer :: myWorldRank
267 integer :: i
268
269
270 status = 0
271 if (.not. ComponentPlanSet) return
272
273 !! We make a dangerous assumption here that if numberProcessors is
274 !! zero, then we need to get the information from MPI.
275 if (thisComponentPlan%numberProcessors == 0 ) then
276 call mpi_comm_size( MPI_COMM_WORLD, nWorldProcessors,mpiErrors)
277 if ( mpiErrors /= 0 ) then
278 status = -1
279 return
280 endif
281 call mpi_comm_rank( MPI_COMM_WORLD,myWorldRank,mpiErrors)
282 if ( mpiErrors /= 0 ) then
283 status = -1
284 return
285 endif
286
287 else
288 nWorldProcessors = thisComponentPlan%numberProcessors
289 myWorldRank = thisComponentPlan%myNode
290 endif
291
292
293
294
295 nColumnsMax = nint(sqrt(real(nWorldProcessors,kind=dp)))
296
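!! nColumns is chosen as the largest divisor of nWorldProcessors that does
!! not exceed sqrt(nWorldProcessors), giving the most nearly square
!! nRows x nColumns grid the processor count allows.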
297 do i = 1, nColumnsMax
298 if (mod(nWorldProcessors,i) == 0) nColumns = i
299 end do
300
301 nRows = nWorldProcessors/nColumns
302
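!! Ranks are laid out in row-major order: processors sharing a rowIndex
!! (rank/nColumns) end up in the same row communicator, and processors
!! sharing a columnIndex (mod(rank,nColumns)) end up in the same column
!! communicator; mpi_comm_split builds both from MPI_COMM_WORLD.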
303 rowIndex = myWorldRank/nColumns
304 call mpi_comm_split(mpi_comm_world,rowIndex,0,rowCommunicator,mpiErrors)
305 if ( mpiErrors /= 0 ) then
306 status = -1
307 return
308 endif
309
310 columnIndex = mod(myWorldRank,nColumns)
311 call mpi_comm_split(mpi_comm_world,columnIndex,0,columnCommunicator,mpiErrors)
312 if ( mpiErrors /= 0 ) then
313 status = -1
314 return
315 endif
316
317 ! Set appropriate components of thisComponentPlan
318 thisComponentPlan%rowComm = rowCommunicator
319 thisComponentPlan%columnComm = columnCommunicator
320 thisComponentPlan%rowIndex = rowIndex
321 thisComponentPlan%columnIndex = columnIndex
322 thisComponentPlan%numberRows = nRows
323 thisComponentPlan%numberColumns = nColumns
324
325
326 end subroutine make_Force_Grid
327
328
329 !! Initializes a gather/scatter plan.
330 subroutine plan_gather_scatter( nDim,nComponents,thisComponentPlan, &
331 thisComm, this_plan,status)
332 integer, intent(in) :: nDim !! Number of dimensions for gather scatter plan
333 integer, intent(in) :: nComponents
334 type (mpiComponentPlan), intent(in), target :: thisComponentPlan
335 type (gs_plan), intent(out) :: this_plan !! MPI Component Plan
336 integer, intent(in) :: thisComm !! MPI communicator for this plan
337
338 integer :: arraySize !! size to allocate plan for
339 integer, intent(out), optional :: status
340 integer :: ierror
341 integer :: i,junk
342
343 if (present(status)) status = 0
344
345
346 !! Set the gsComponentPlan pointer
347 !! to the component plan we want to use for this gather/scatter plan.
348 !! WARNING: this could be dangerous since thisComponentPlan was originally
349 !! allocated in C++ and there is a significant difference between C and
350 !! F95 pointers....
351 this_plan%gsComponentPlan => thisComponentPlan
352
353 ! Set this plan size for displs array.
354 this_plan%gsPlanSize = nDim * nComponents
355
356 ! Duplicate communicator for this plan
357 call mpi_comm_dup(thisComm,this_plan%myPlanComm,mpi_err)
358 if (mpi_err /= 0) then
359 if (present(status)) status = -1
360 return
361 end if
362 call mpi_comm_rank(this_plan%myPlanComm,this_plan%myPlanRank,mpi_err)
363 if (mpi_err /= 0) then
364 if (present(status)) status = -1
365 return
366 end if
367
368 call mpi_comm_size(this_plan%myPlanComm,this_plan%planNprocs,mpi_err)
369
370 if (mpi_err /= 0) then
371 if (present(status)) status = -1
372 return
373 end if
374
375 !! counts and displacements arrays are indexed from 0 to be compatible
376 !! with MPI arrays.
377 allocate (this_plan%counts(0:this_plan%planNprocs-1),STAT=ierror)
378 if (ierror /= 0) then
379 if (present(status)) status = -1
380 return
381 end if
382
383 allocate (this_plan%displs(0:this_plan%planNprocs-1),STAT=ierror)
384 if (ierror /= 0) then
385 if (present(status)) status = -1
386 return
387 end if
388
389 !! gather all the local plan sizes into the counts array (one entry per processor).
390 call mpi_allgather(this_plan%gsPlanSize,1,mpi_integer,this_plan%counts, &
391 1,mpi_integer,this_plan%myPlanComm,mpi_err)
392
393 if (mpi_err /= 0) then
394 if (present(status)) status = -1
395 return
396 end if
397
398
399 !! figure out the total number of particles in this plan
400 this_plan%globalPlanSize = sum(this_plan%counts)
401
402
403 !! initialize plan displacements.
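!! displs(i) is the exclusive prefix sum of counts: the offset at which
!! rank i's block begins in the gathered (row or column) array.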
404 this_plan%displs(0) = 0
405 do i = 1, this_plan%planNprocs - 1,1
406 this_plan%displs(i) = this_plan%displs(i-1) + this_plan%counts(i-1)
407 end do
408
409 end subroutine plan_gather_scatter
410
411
412 subroutine unplan_gather_scatter(this_plan)
413 type (gs_plan), intent(inout) :: this_plan
414
415
416 this_plan%gsComponentPlan => null()
417 call mpi_comm_free(this_plan%myPlanComm,mpi_err)
418
419 deallocate(this_plan%counts)
420 deallocate(this_plan%displs)
421
422 end subroutine unplan_gather_scatter
423
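!! The three gather variants below share the same pattern: each rank
!! contributes its gsPlanSize elements via mpi_allgatherv, and every rank
!! receives the full row/column array laid out according to counts/displs.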
424 subroutine gather_integer( sbuffer, rbuffer, this_plan, status)
425
426 type (gs_plan), intent(in) :: this_plan
427 integer, dimension(:), intent(in) :: sbuffer
428 integer, dimension(:), intent(inout) :: rbuffer
429 integer :: noffset
430 integer, intent(out), optional :: status
431
432 if (present(status)) status = 0
433 noffset = this_plan%displs(this_plan%myPlanRank)
434
435 call mpi_allgatherv(sbuffer,this_plan%gsPlanSize, mpi_integer, &
436 rbuffer,this_plan%counts,this_plan%displs,mpi_integer, &
437 this_plan%myPlanComm, mpi_err)
438
439 if (mpi_err /= 0) then
440 if (present(status)) status = -1
441 endif
442
443 end subroutine gather_integer
444
445 subroutine gather_double( sbuffer, rbuffer, this_plan, status)
446
447 type (gs_plan), intent(in) :: this_plan
448 real( kind = DP ), dimension(:), intent(in) :: sbuffer
449 real( kind = DP ), dimension(:), intent(inout) :: rbuffer
450 integer :: noffset
451 integer, intent(out), optional :: status
452
453 if (present(status)) status = 0
454 noffset = this_plan%displs(this_plan%myPlanRank)
455
456 call mpi_allgatherv(sbuffer,this_plan%gsPlanSize, mpi_double_precision, &
457 rbuffer,this_plan%counts,this_plan%displs,mpi_double_precision, &
458 this_plan%myPlanComm, mpi_err)
459
460 if (mpi_err /= 0) then
461 if (present(status)) status = -1
462 endif
463
464 end subroutine gather_double
465
466 subroutine gather_double_2d( sbuffer, rbuffer, this_plan, status)
467
468 type (gs_plan), intent(in) :: this_plan
469 real( kind = DP ), dimension(:,:), intent(in) :: sbuffer
470 real( kind = DP ), dimension(:,:), intent(inout) :: rbuffer
471 integer :: noffset,i,ierror
472 integer, intent(out), optional :: status
473
474 external mpi_allgatherv
475
476 if (present(status)) status = 0
477
478 ! noffset = this_plan%displs(this_plan%me)
479
480 call mpi_allgatherv(sbuffer,this_plan%gsPlanSize, mpi_double_precision, &
481 rbuffer,this_plan%counts,this_plan%displs,mpi_double_precision, &
482 this_plan%myPlanComm, mpi_err)
483
484 if (mpi_err /= 0) then
485 if (present(status)) status = -1
486 endif
487
488 end subroutine gather_double_2d
489
490 subroutine scatter_double( sbuffer, rbuffer, this_plan, status)
491
492 type (gs_plan), intent(in) :: this_plan
493 real( kind = DP ), dimension(:), intent(in) :: sbuffer
494 real( kind = DP ), dimension(:), intent(inout) :: rbuffer
495 integer, intent(out), optional :: status
496 external mpi_reduce_scatter
497
498 if (present(status)) status = 0
499
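!! mpi_reduce_scatter sums the row/column contributions elementwise across
!! all ranks in the plan, then hands each rank the counts(rank) entries
!! that belong to its local components.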
500 call mpi_reduce_scatter(sbuffer,rbuffer, this_plan%counts, &
501 mpi_double_precision, MPI_SUM, this_plan%myPlanComm, mpi_err)
502
503 if (mpi_err /= 0) then
504 if (present(status)) status = -1
505 endif
506
507 end subroutine scatter_double
508
509 subroutine scatter_double_2d( sbuffer, rbuffer, this_plan, status)
510
511 type (gs_plan), intent(in) :: this_plan
512 real( kind = DP ), dimension(:,:), intent(in) :: sbuffer
513 real( kind = DP ), dimension(:,:), intent(inout) :: rbuffer
514 integer, intent(out), optional :: status
515 external mpi_reduce_scatter
516
517 if (present(status)) status = 0
518 call mpi_reduce_scatter(sbuffer,rbuffer, this_plan%counts, &
519 mpi_double_precision, MPI_SUM, this_plan%myPlanComm, mpi_err)
520
521 if (mpi_err /= 0) then
522 if (present(status)) status = -1
523 endif
524
525 end subroutine scatter_double_2d
526
527
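!! Builds tagRow and tagColumn: the global particle tags for every
!! component in this processor's row and column, gathered from each
!! processor's local tag list along the row and column plans.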
528 subroutine setTags(tags,status)
529 integer, dimension(:) :: tags
530 integer :: status
531
532 integer :: alloc_stat
533
534 integer :: ncol
535 integer :: nrow
536
537 status = 0
538 ! allocate row arrays
539 nrow = getNrow(plan_row)
540 ncol = getNcol(plan_col)
541
542 if (.not. allocated(tagRow)) then
543 allocate(tagRow(nrow),STAT=alloc_stat)
544 if (alloc_stat /= 0 ) then
545 status = -1
546 return
547 endif
548 else
549 deallocate(tagRow)
550 allocate(tagRow(nrow),STAT=alloc_stat)
551 if (alloc_stat /= 0 ) then
552 status = -1
553 return
554 endif
555
556 endif
557 ! allocate column arrays
558 if (.not. allocated(tagColumn)) then
559 allocate(tagColumn(ncol),STAT=alloc_stat)
560 if (alloc_stat /= 0 ) then
561 status = -1
562 return
563 endif
564 else
565 deallocate(tagColumn)
566 allocate(tagColumn(ncol),STAT=alloc_stat)
567 if (alloc_stat /= 0 ) then
568 status = -1
569 return
570 endif
571 endif
572
573 call gather(tags,tagRow,plan_row)
574 call gather(tags,tagColumn,plan_col)
575
576
577 end subroutine setTags
578
579 pure function getNcol(thisplan) result(ncol)
580 type (gs_plan), intent(in) :: thisplan
581 integer :: ncol
582 ncol = thisplan%gsComponentPlan%nComponentsColumn
583 end function getNcol
584
585 pure function getNrow(thisplan) result(nrow)
586 type (gs_plan), intent(in) :: thisplan
587 integer :: nrow
588 nrow = thisplan%gsComponentPlan%nComponentsRow
589 end function getNrow
590
591 function isMPISimSet() result(isthisSimSet)
592 logical :: isthisSimSet
593 if (isSimSet) then
594 isthisSimSet = .true.
595 else
596 isthisSimSet = .false.
597 endif
598 end function isMPISimSet
599
600
601
602 #endif
603 end module mpiSimulation
604