ViewVC Help
View File | Revision Log | Show Annotations | View Changeset | Root Listing
root/group/trunk/mdtools/mpi_implementation/mpiSimulation_module.F90
(Generate patch)

Comparing trunk/mdtools/mpi_implementation/mpiSimulation_module.F90 (file contents):
Revision 253 by chuckv, Thu Jan 30 15:20:21 2003 UTC vs.
Revision 259 by chuckv, Thu Jan 30 22:29:58 2003 UTC

# Line 5 | Line 5
5   !!
6   !! @author Charles F. Vardeman II
7   !! @author Matthew Meineke
8 < !! @version $Id: mpiSimulation_module.F90,v 1.4 2003-01-30 15:20:21 chuckv Exp $, $Date: 2003-01-30 15:20:21 $, $Name: not supported by cvs2svn $, $Revision: 1.4 $
8 > !! @version $Id: mpiSimulation_module.F90,v 1.6 2003-01-30 22:29:58 chuckv Exp $, $Date: 2003-01-30 22:29:58 $, $Name: not supported by cvs2svn $, $Revision: 1.6 $
9  
10  
11  
12  
13   module mpiSimulation  
14 < #ifdef IS_MPI
14 >  use definitions
15    use mpi
16    implicit none
17    PRIVATE
# Line 55 | Line 55 | module mpiSimulation  
55   !! generic mpi error declaration.
56    integer,public  :: mpi_err
57  
58 +  
59  
59
60   !! Include mpiComponentPlan type. mpiComponentPlan is a
61   !! dual header file for both c and fortran.
62   #define __FORTRAN90
# Line 72 | Line 72 | module mpiSimulation  
72   !! Logical set true if mpiSimulation has been initialized
73    logical :: isSimSet = .false.
74  
75 +
76 +  type (mpiComponentPlan) :: mpiSim
77 +
78   !! gs_plan contains plans for gather and scatter routines
79    type, public :: gs_plan
80       private
# Line 131 | Line 134 | contains
134   !! Global reference tag for local particles
135      integer, dimension(ntags),intent(inout) :: tags
136  
137 +
138      status = 0
139      if (componentPlanSet) then
140         return
141      endif
138
142      componentPlanSet = .true.
143 +    
144 + !! copy c component plan to fortran  
145 +    mpiSim = thisComponentPlan
146 +    write(*,*) "Seting up simParallel"
147 +    if (mpiSim%myNode == 0) then
148 +       write(*,*) "SetupSimParallel: writing component plan"
149 +      
150 +       write(*,*) "nMolGlobal: ", mpiSim%nMolGlobal
151 +       write(*,*) "nAtomsGlobal: ", mpiSim%nAtomsGlobal
152 +       write(*,*) "nBondGlobal: ", mpiSim%nBondsGlobal
153 +       write(*,*) "nTorsionsGlobal: ", mpiSim%nTorsionsGlobal
154 +       write(*,*) "nSRIGlobal: ", mpiSim%nSRIGlobal
155 +       write(*,*) "myMolStart: ", mpiSim%myMolStart
156 +       write(*,*) "myMolEnd: ", mpiSim%myMolEnd
157 +       write(*,*) "myMol: ", mpiSim%myMol
158 +       write(*,*) "myNlocal: ", mpiSim%myNlocal
159 +       write(*,*) "myNode: ", mpiSim%myNode
160 +       write(*,*) "numberProcessors: ", mpiSim%numberProcessors
161 +       write(*,*) "rowComm: ", mpiSim%rowComm
162 +       write(*,*) "columnComm: ", mpiSim%columnComm
163 +       write(*,*) "numberRows: ", mpiSim%numberRows
164 +       write(*,*) "numberColumns: ", mpiSim%numberColumns
165 +       write(*,*) "nComponentsRow: ", mpiSim%nComponentsRow
166 +       write(*,*) "nComponentsColumn: ", mpiSim%nComponentsColumn
167 +       write(*,*) "rowIndex: ", mpiSim%rowIndex
168 +       write(*,*) "columnIndex: ", mpiSim%columnIndex
169 +    endif
170  
171 <    call make_Force_Grid(thisComponentPlan,localStatus)
171 >
172 >    call make_Force_Grid(mpiSim,localStatus)
173      if (localStatus /= 0) then
174 +       write(default_error,*) "Error creating force grid"
175         status = -1
176         return
177      endif
178  
179 <    call updateGridComponents(thisComponentPlan,localStatus)
179 >    call updateGridComponents(mpiSim,localStatus)
180      if (localStatus /= 0) then
181 +       write(default_error,*) "Error updating grid components"
182         status = -1
183         return
184      endif
185      
186 +
187      !! initialize gather and scatter plans used in this simulation
188 <    call plan_gather_scatter(1,thisComponentPlan%nComponentsRow,&
189 <         thisComponentPlan,row_comm,plan_row)
190 <    call plan_gather_scatter(nDim,thisComponentPlan%nComponentsRow,&
191 <         thisComponentPlan,row_comm,plan_row3d)
192 <    call plan_gather_scatter(1,thisComponentPlan%nComponentsColumn,&
193 <         thisComponentPlan,col_comm,plan_col)
194 <    call plan_gather_scatter(nDim,thisComponentPlan%nComponentsColumn,&
195 <         thisComponentPlan,col_comm,plan_col3d)
188 >    call plan_gather_scatter(1,mpiSim%nComponentsRow,&
189 >         mpiSim,mpiSim%rowComm,plan_row)
190 >    call plan_gather_scatter(nDim,mpiSim%nComponentsRow,&
191 >         mpiSim,mpiSim%rowComm,plan_row3d)
192 >    call plan_gather_scatter(1,mpiSim%nComponentsColumn,&
193 >         mpiSim,mpiSim%columnComm,plan_col)
194 >    call plan_gather_scatter(nDim,mpiSim%nComponentsColumn,&
195 >         mpiSim,mpiSim%columnComm,plan_col3d)
196  
197   !  Initialize tags    
198      call setTags(tags,localStatus)
# Line 174 | Line 208 | contains
208      !! mpiComponentPlan struct from C
209      type (mpiComponentPlan), intent(inout) :: thisComponentPlan  
210      integer, intent(out) :: status
211 <    integer, intent(out) :: localStatus
212 <
211 >    integer :: localStatus
212 >    integer :: mpierror
213      status = 0
214  
215      call updateGridComponents(thisComponentPlan,localStatus)
# Line 193 | Line 227 | contains
227  
228      !! initialize gather and scatter plans used in this simulation
229      call plan_gather_scatter(1,thisComponentPlan%nComponentsRow,&
230 <         thisComponentPlan,row_comm,plan_row)
230 >         thisComponentPlan,thisComponentPlan%rowComm,plan_row)
231      call plan_gather_scatter(nDim,thisComponentPlan%nComponentsRow,&
232 <         thisComponentPlan,row_comm,plan_row3d)
232 >         thisComponentPlan,thisComponentPlan%rowComm,plan_row3d)
233      call plan_gather_scatter(1,thisComponentPlan%nComponentsColumn,&
234 <         thisComponentPlan,col_comm,plan_col)
234 >         thisComponentPlan,thisComponentPlan%columnComm,plan_col)
235      call plan_gather_scatter(nDim,thisComponentPlan%nComponentsColumn,&
236 <         thisComponentPlan,col_comm,plan_col3d)
236 >         thisComponentPlan,thisComponentPlan%rowComm,plan_col3d)
237  
238  
239  
# Line 215 | Line 249 | contains
249      integer, intent(out) :: status
250      integer :: nComponentsLocal
251      integer :: nComponentsRow = 0
252 <    integer :: nComponensColumn = 0
252 >    integer :: nComponentsColumn = 0
253      integer :: mpiErrors
254  
255      status = 0
# Line 229 | Line 263 | contains
263      nComponentsLocal = thisComponentPlan%myNlocal
264  
265      call mpi_allreduce(nComponentsLocal,nComponentsRow,1,mpi_integer,&
266 <         mpi_sum,thisComponentPlan%rowComm,mpiError)
266 >         mpi_sum,thisComponentPlan%rowComm,mpiErrors)
267      if (mpiErrors /= 0) then
268         status = -1
269         return
270      endif
271  
272      call mpi_allreduce(nComponentsLocal,nComponentsColumn,1,mpi_integer, &
273 <         mpi_sum,thisComponentPlan%columnComm,mpiError)    
273 >         mpi_sum,thisComponentPlan%columnComm,mpiErrors)    
274      if (mpiErrors /= 0) then
275         status = -1
276         return
# Line 269 | Line 303 | contains
303  
304      if (.not. ComponentPlanSet) return
305      status = 0
306 <
306 >  
307   !! We make a dangerous assumption here that if numberProcessors is
308   !! zero, then we need to get the information from MPI.
309      if (thisComponentPlan%numberProcessors == 0 ) then
# Line 301 | Line 335 | contains
335      nRows = nWorldProcessors/nColumns
336  
337      rowIndex = myWorldRank/nColumns
338 <    call mpi_comm_split(mpi_comm_world,rowIndex,0,rowCommunicator,mpiError)
338 >
339 >
340 >
341 >    call mpi_comm_split(mpi_comm_world,rowIndex,0,rowCommunicator,mpiErrors)
342      if ( mpiErrors /= 0 ) then
343 +       write(default_error,*) "MPI comm split failed at row communicator"
344         status = -1
345         return
346      endif
347  
348      columnIndex = mod(myWorldRank,nColumns)
349 <    call mpi_comm_split(mpi_comm_world,columnIndex,0,columnCommunicator,mpiError)
349 >    call mpi_comm_split(mpi_comm_world,columnIndex,0,columnCommunicator,mpiErrors)
350      if ( mpiErrors /= 0 ) then
351 +       write(default_error,*) "MPI comm split faild at columnCommunicator"
352         status = -1
353         return
354      endif
# Line 348 | Line 387 | contains
387   !! WARNING this could be dangerous since thisComponentPlan was origionally
388   !! allocated in C++ and there is a significant difference between c and
389   !! f95 pointers....  
390 <    gsComponentPlan => thisComponetPlan
390 >    this_plan%gsComponentPlan => thisComponentPlan
391  
392   ! Set this plan size for displs array.
393      this_plan%gsPlanSize = nDim * nComponents
# Line 388 | Line 427 | contains
427  
428     !! gather all the local sizes into a size # processors array.
429      call mpi_allgather(this_plan%gsPlanSize,1,mpi_integer,this_plan%counts, &
430 <         1,mpi_integer,comm,mpi_err)
430 >         1,mpi_integer,thisComm,mpi_err)
431  
432      if (mpi_err /= 0) then
433         if (present(status)) status = -1
# Line 428 | Line 467 | contains
467      integer, dimension(:), intent(in) :: rbuffer
468      integer :: noffset
469      integer, intent(out), optional :: status
470 +    integer :: i
471  
472 +    
473      if (present(status)) status = 0
474      noffset = this_plan%displs(this_plan%myPlanRank)
475  
# Line 555 | Line 596 | contains
596  
597      endif
598   ! allocate column arrays
599 <    if (.not. allocated(tagCol)) then
599 >    if (.not. allocated(tagColumn)) then
600         allocate(tagColumn(ncol),STAT=alloc_stat)
601         if (alloc_stat /= 0 ) then
602            status = -1
# Line 599 | Line 640 | contains
640    
641  
642    
643 < #endif
643 >
644   end module mpiSimulation
645  

Diff Legend

- Removed lines
+ Added lines
< Changed lines
> Changed lines