root/group/trunk/mdtools/mpi_implementation/mpiSimulation_module.F90

Comparing trunk/mdtools/mpi_implementation/mpiSimulation_module.F90 (file contents):
Revision 215 by chuckv, Thu Dec 19 21:59:51 2002 UTC vs.
Revision 254 by chuckv, Thu Jan 30 20:03:37 2003 UTC

# Line 5 | Line 5
5   !!
6   !! @author Charles F. Vardeman II
7   !! @author Matthew Meineke
8 < !! @version $Id: mpiSimulation_module.F90,v 1.1 2002-12-19 21:59:51 chuckv Exp $, $Date: 2002-12-19 21:59:51 $, $Name: not supported by cvs2svn $, $Revision: 1.1 $
8 > !! @version $Id: mpiSimulation_module.F90,v 1.5 2003-01-30 20:03:37 chuckv Exp $, $Date: 2003-01-30 20:03:37 $, $Name: not supported by cvs2svn $, $Revision: 1.5 $
9  
10  
11  
12  
13   module mpiSimulation  
14 < #ifdef MPI
14 >  use definitions
15    use mpi
16    implicit none
17    PRIVATE
# Line 23 | Line 23 | module mpiSimulation  
23    public :: gather, scatter
24    public :: setupSimParallel
25    public :: replanSimParallel
26 +  public :: getNcol
27 +  public :: getNrow
28 +  public :: isMPISimSet
29 +
30 +
31   !! PUBLIC  Subroutines contained in MPI module
32    public :: mpi_bcast
33    public :: mpi_allreduce
# Line 50 | Line 55 | module mpiSimulation  
55   !! generic mpi error declaration.
56    integer,public  :: mpi_err
57  
58 +
59 +
60   !! Include mpiComponentPlan type. mpiComponentPlan is a
61   !! dual header file for both c and fortran.
62   #define __FORTRAN90
63   #include "mpiComponentPlan.h"
64  
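The __FORTRAN90 guard is what makes mpiComponentPlan.h usable from both languages: the preprocessor emits a Fortran derived type when the macro is defined and a matching C struct otherwise, so the C++ driver and this module share a single layout definition. The header itself is not part of this diff, so the following is only a sketch of the pattern, restricted to fields this module actually references; the real field list and ordering may differ.

  #ifdef __FORTRAN90
    type, public :: mpiComponentPlan
       sequence                        ! storage order must match the C struct
       integer :: myNode               ! world rank of this processor
       integer :: numberProcessors     ! size of mpi_comm_world
       integer :: myNlocal             ! number of components local to this node
       integer :: nComponentsRow       ! total components along a row
       integer :: nComponentsColumn    ! total components along a column
       integer :: rowComm, columnComm  ! communicator handles from mpi_comm_split
    end type mpiComponentPlan
  #endif  /* the C branch would declare a struct with the same members */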
65 +
66 +
67 + !! Tags used during force loop for parallel simulation
68 +  integer, allocatable, dimension(:) :: tagLocal
69 +  integer, public, allocatable, dimension(:) :: tagRow
70 +  integer ,public, allocatable, dimension(:) :: tagColumn
71 +
72 + !! Logical set true if mpiSimulation has been initialized
73 +  logical :: isSimSet = .false.
74 +
75   !! gs_plan contains plans for gather and scatter routines
76    type, public :: gs_plan
77       private
# Line 97 | Line 114 | module mpiSimulation  
114    end interface
115  
116  
117 +
118   contains
119  
120   !! Sets up mpiComponentPlan with structure passed from C++.
121 <  subroutine setupSimParallel(thisComponentPlan,status)
121 >  subroutine setupSimParallel(thisComponentPlan,ntags,tags,status)
122   !  Passed Arguments
123   !    integer, intent(inout) :: nDim !! Number of dimensions
124      !! mpiComponentPlan struct from C
125 <    type (mpiComponentPlan), intent(inout) :: thisComponentPlan  
125 >    type (mpiComponentPlan), intent(inout) :: thisComponentPlan
126 > !! Number of tags passed, nlocal  
127 >    integer, intent(in) :: ntags
128 > !! Result status, 0 = normal, -1 = error
129      integer, intent(out) :: status
130 <    integer, intnet(out) :: localStatus
130 >    integer :: localStatus
131 > !! Global reference tag for local particles
132 >    integer, dimension(ntags),intent(inout) :: tags
133  
134 +
135      status = 0
136      if (componentPlanSet) then
137         return
138      endif
115
139      componentPlanSet = .true.
140 <
140 >    
141 >    
142      call make_Force_Grid(thisComponentPlan,localStatus)
143      if (localStatus /= 0) then
144 +       write(default_error,*) "Error creating force grid"
145         status = -1
146         return
147      endif
148  
149      call updateGridComponents(thisComponentPlan,localStatus)
150      if (localStatus /= 0) then
151 +       write(default_error,*) "Error updating grid components"
152         status = -1
153         return
154      endif
155      
156 +
157      !! initialize gather and scatter plans used in this simulation
158      call plan_gather_scatter(1,thisComponentPlan%nComponentsRow,&
159 <         thisComponentPlan,row_comm,plan_row)
159 >         thisComponentPlan,thisComponentPlan%rowComm,plan_row)
160      call plan_gather_scatter(nDim,thisComponentPlan%nComponentsRow,&
161 <         thisComponentPlan,row_comm,plan_row3d)
161 >         thisComponentPlan,thisComponentPlan%rowComm,plan_row3d)
162      call plan_gather_scatter(1,thisComponentPlan%nComponentsColumn,&
163 <         thisComponentPlan,col_comm,plan_col)
163 >         thisComponentPlan,thisComponentPlan%columnComm,plan_col)
164      call plan_gather_scatter(nDim,thisComponentPlan%nComponentsColumn,&
165 <         thisComponentPlan,col_comm,plan_col3d)
165 >         thisComponentPlan,thisComponentPlan%columnComm,plan_col3d)
166  
167 <
168 <
167 > !  Initialize tags    
168 >    call setTags(tags,localStatus)
169 >    if (localStatus /= 0) then
170 >       status = -1
171 >       return
172 >    endif
173 >    isSimSet = .true.
174    end subroutine setupSimParallel
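From the calling side, the plan arrives already populated by the C++ layer and ntags is the caller's local particle count. A hypothetical invocation (every name except setupSimParallel and the plan fields is illustrative, not from this file):

  type (mpiComponentPlan) :: myPlan      ! filled in by the C++ layer
  integer, allocatable :: globalTags(:)  ! global index of each local particle
  integer :: setupStatus

  allocate(globalTags(myPlan%myNlocal))
  ! ... fill globalTags from the simulation's global indexing ...
  call setupSimParallel(myPlan, myPlan%myNlocal, globalTags, setupStatus)
  if (setupStatus /= 0) write(default_error,*) "setupSimParallel failed"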
175  
176    subroutine replanSimParallel(thisComponentPlan,status)
# Line 146 | Line 178 | contains
178      !! mpiComponentPlan struct from C
179      type (mpiComponentPlan), intent(inout) :: thisComponentPlan  
180      integer, intent(out) :: status
181 <    integer, intnet(out) :: localStatus
182 <
181 >    integer :: localStatus
182 >    integer :: mpierror
183      status = 0
184  
185      call updateGridComponents(thisComponentPlan,localStatus)
# Line 165 | Line 197 | contains
197  
198      !! initialize gather and scatter plans used in this simulation
199      call plan_gather_scatter(1,thisComponentPlan%nComponentsRow,&
200 <         thisComponentPlan,row_comm,plan_row)
200 >         thisComponentPlan,thisComponentPlan%rowComm,plan_row)
201      call plan_gather_scatter(nDim,thisComponentPlan%nComponentsRow,&
202 <         thisComponentPlan,row_comm,plan_row3d)
202 >         thisComponentPlan,thisComponentPlan%rowComm,plan_row3d)
203      call plan_gather_scatter(1,thisComponentPlan%nComponentsColumn,&
204 <         thisComponentPlan,col_comm,plan_col)
204 >         thisComponentPlan,thisComponentPlan%columnComm,plan_col)
205      call plan_gather_scatter(nDim,thisComponentPlan%nComponentsColumn,&
206 <         thisComponentPlan,col_comm,plan_col3d)
206 >         thisComponentPlan,thisComponentPlan%columnComm,plan_col3d)
207  
208  
209  
# Line 187 | Line 219 | contains
219      integer, intent(out) :: status
220      integer :: nComponentsLocal
221      integer :: nComponentsRow = 0
222 <    integer :: nComponensColumn = 0
222 >    integer :: nComponentsColumn = 0
223      integer :: mpiErrors
224  
225      status = 0
# Line 201 | Line 233 | contains
233      nComponentsLocal = thisComponentPlan%myNlocal
234  
235      call mpi_allreduce(nComponentsLocal,nComponentsRow,1,mpi_integer,&
236 <         mpi_sum,thisComponentPlan%rowComm,mpiError)
236 >         mpi_sum,thisComponentPlan%rowComm,mpiErrors)
237      if (mpiErrors /= 0) then
238         status = -1
239         return
240      endif
241  
242      call mpi_allreduce(nComponentsLocal,nComponentsColumn,1,mpi_integer, &
243 <         mpi_sum,thisComponentPlan%columnComm,mpiError)    
243 >         mpi_sum,thisComponentPlan%columnComm,mpiErrors)    
244      if (mpiErrors /= 0) then
245         status = -1
246         return
# Line 241 | Line 273 | contains
273  
274      if (.not. ComponentPlanSet) return
275      status = 0
276 <
276 >  
277   !! We make a dangerous assumption here that if numberProcessors is
278   !! zero, then we need to get the information from MPI.
279      if (thisComponentPlan%numberProcessors == 0 ) then
# Line 258 | Line 290 | contains
290  
291      else
292         nWorldProcessors = thisComponentPlan%numberProcessors
293 <       myWorldRank = thisComponentPlan%myRank
293 >       myWorldRank = thisComponentPlan%myNode
294      endif
295  
296  
# Line 273 | Line 305 | contains
305      nRows = nWorldProcessors/nColumns
306  
307      rowIndex = myWorldRank/nColumns
308 <    call mpi_comm_split(mpi_comm_world,rowIndex,0,rowCommunicator,mpiError)
308 >
309 >
310 >
311 >    call mpi_comm_split(mpi_comm_world,rowIndex,0,rowCommunicator,mpiErrors)
312      if ( mpiErrors /= 0 ) then
313 +       write(default_error,*) "MPI comm split failed at row communicator"
314         status = -1
315         return
316      endif
317  
318      columnIndex = mod(myWorldRank,nColumns)
319 <    call mpi_comm_split(mpi_comm_world,columnIndex,0,columnCommunicator,mpiError)
319 >    call mpi_comm_split(mpi_comm_world,columnIndex,0,columnCommunicator,mpiErrors)
320      if ( mpiErrors /= 0 ) then
321 +       write(default_error,*) "MPI comm split failed at columnCommunicator"
322         status = -1
323         return
324      endif
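The choice of nColumns falls in lines this diff elides, but the indexing above fixes the layout: world ranks are laid out row-major across an nRows x nColumns grid. A worked example, assuming six processors and nColumns = 2:

  nRows = 6/2 = 3
  rank 0:  rowIndex = 0/2 = 0   columnIndex = mod(0,2) = 0
  rank 1:  rowIndex = 0         columnIndex = 1
  rank 2:  rowIndex = 1         columnIndex = 0
  rank 3:  rowIndex = 1         columnIndex = 1
  rank 4:  rowIndex = 2         columnIndex = 0
  rank 5:  rowIndex = 2         columnIndex = 1

mpi_comm_split with color rowIndex then yields three row communicators ({0,1}, {2,3}, {4,5}), and color columnIndex yields two column communicators ({0,2,4}, {1,3,5}); the key argument of 0 lets MPI order ranks within each new communicator by their world rank.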
# Line 320 | Line 357 | contains
357   !! WARNING this could be dangerous since thisComponentPlan was originally
358   !! allocated in C++ and there is a significant difference between c and
359   !! f95 pointers....  
360 <    gsComponentPlan => thisComponetPlan
360 >    this_plan%gsComponentPlan => thisComponentPlan
361  
362   ! Set this plan size for displs array.
363      this_plan%gsPlanSize = nDim * nComponents
# Line 359 | Line 396 | contains
396      end if
397  
398     !! gather all the local sizes into a size # processors array.
399 <    call mpi_allgather(gs_plan%gsPlanSize,1,mpi_integer,this_plan%counts, &
400 <         1,mpi_integer,comm,mpi_err)
399 >    call mpi_allgather(this_plan%gsPlanSize,1,mpi_integer,this_plan%counts, &
400 >         1,mpi_integer,thisComm,mpi_err)
401  
402      if (mpi_err /= 0) then
403         if (present(status)) status = -1
# Line 386 | Line 423 | contains
423      
424      
425      this_plan%gsComponentPlan => null()
426 <    call mpi_comm_free(this_plan%comm,mpi_err)
426 >    call mpi_comm_free(this_plan%myPlanComm,mpi_err)
427  
428      deallocate(this_plan%counts)
429      deallocate(this_plan%displs)
# Line 423 | Line 460 | contains
460      integer, intent(out), optional :: status
461  
462      if (present(status)) status = 0
463 <    noffset = this_plan%displs(this_plan%me)
463 >    noffset = this_plan%displs(this_plan%myPlanRank)
464  
465      call mpi_allgatherv(sbuffer,this_plan%gsPlanSize, mpi_double_precision, &
466           rbuffer,this_plan%counts,this_plan%displs,mpi_double_precision, &
# Line 496 | Line 533 | contains
533  
534    end subroutine scatter_double_2d
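Throughout these gather routines, this_plan%counts holds each rank's contribution length and this_plan%displs the offset at which that contribution lands in the assembled buffer, so displs(myPlanRank) is a rank's own offset. A small illustration with invented sizes: if three ranks contribute gsPlanSize = 4, 2, and 3 elements, then

  counts = (/ 4, 2, 3 /)
  displs = (/ 0, 4, 6 /)   ! running sum of counts
  ! rank 0 fills rbuffer(1:4), rank 1 fills rbuffer(5:6), rank 2 fills rbuffer(7:9)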
535  
536 +
537 +  subroutine setTags(tags,status)
538 +    integer, dimension(:) :: tags
539 +    integer :: status
540 +
541 +    integer :: alloc_stat
542 +    
543 +    integer :: ncol
544 +    integer :: nrow
545 +
546 +    status = 0
547 + ! allocate row arrays
548 +    nrow = getNrow(plan_row)
549 +    ncol = getNcol(plan_col)
550 +
551 +    if (.not. allocated(tagRow)) then
552 +       allocate(tagRow(nrow),STAT=alloc_stat)
553 +       if (alloc_stat /= 0 ) then
554 +          status = -1
555 +          return
556 +       endif
557 +    else
558 +       deallocate(tagRow)
559 +       allocate(tagRow(nrow),STAT=alloc_stat)
560 +       if (alloc_stat /= 0 ) then
561 +          status = -1
562 +          return
563 +       endif
564 +
565 +    endif
566 + ! allocate column arrays
567 +    if (.not. allocated(tagColumn)) then
568 +       allocate(tagColumn(ncol),STAT=alloc_stat)
569 +       if (alloc_stat /= 0 ) then
570 +          status = -1
571 +          return
572 +       endif
573 +    else
574 +       deallocate(tagColumn)
575 +       allocate(tagColumn(ncol),STAT=alloc_stat)
576 +       if (alloc_stat /= 0 ) then
577 +          status = -1
578 +          return
579 +       endif
580 +    endif
581 +    
582 +    call gather(tags,tagRow,plan_row)
583 +    call gather(tags,tagColumn,plan_col)
584 +
585 +
586 +  end subroutine setTags
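setTags gathers the local tags across the row and column communicators, so tagRow and tagColumn hold the global indices of every particle in this processor's row and column; that is what lets a row/column-decomposed force loop recognize when a row entry and a column entry refer to the same particle. The force loop itself lives outside this module; a hypothetical sketch of the intended use (loop body invented for illustration):

  do j = 1, getNcol(plan_col)
     do i = 1, getNrow(plan_row)
        if (tagRow(i) == tagColumn(j)) cycle  ! same particle seen from row and column
        ! ... accumulate the pair interaction between row entry i and column entry j ...
     end do
  end do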
587 +
588 +  pure function getNcol(thisplan) result(ncol)
589 +    type (gs_plan), intent(in) :: thisplan
590 +    integer :: ncol
591 +    ncol = thisplan%gsComponentPlan%nComponentsColumn
592 +  end function getNcol
593 +
594 +  pure function getNrow(thisplan) result(nrow)
595 +    type (gs_plan), intent(in) :: thisplan
596 +    integer :: nrow
597 +    nrow = thisplan%gsComponentPlan%nComponentsRow
598 +  end function getNrow
599 +
600 +  function isMPISimSet() result(isthisSimSet)
601 +    logical :: isthisSimSet
602 +    if (isSimSet) then
603 +       isthisSimSet = .true.
604 +    else
605 +       isthisSimSet = .false.
606 +    endif
607 +  end function isMPISimSet
608    
609 < #endif
609 >
610 >  
611 >
612   end module mpiSimulation
613  

Diff Legend

  Removed lines (old line number with no marker)
+ Added lines
< Changed lines (old revision)
> Changed lines (new revision)