
Comparing trunk/mdtools/mpi_implementation/mpiSimulation_module.F90 (file contents):
Revision 215 by chuckv, Thu Dec 19 21:59:51 2002 UTC vs.
Revision 260 by chuckv, Fri Jan 31 21:04:27 2003 UTC

# Line 5 | Line 5
5   !!
6   !! @author Charles F. Vardeman II
7   !! @author Matthew Meineke
8 < !! @version $Id: mpiSimulation_module.F90,v 1.1 2002-12-19 21:59:51 chuckv Exp $, $Date: 2002-12-19 21:59:51 $, $Name: not supported by cvs2svn $, $Revision: 1.1 $
8 > !! @version $Id: mpiSimulation_module.F90,v 1.7 2003-01-31 21:04:27 chuckv Exp $, $Date: 2003-01-31 21:04:27 $, $Name: not supported by cvs2svn $, $Revision: 1.7 $
9  
10  
11  
12  
13   module mpiSimulation  
14 < #ifdef MPI
14 >  use definitions
15    use mpi
16    implicit none
17    PRIVATE
# Line 23 | Line 23 | module mpiSimulation  
23    public :: gather, scatter
24    public :: setupSimParallel
25    public :: replanSimParallel
26 +  public :: getNcol
27 +  public :: getNrow
28 +  public :: isMPISimSet
29 +  public :: printComponentPlan
30 +  public :: getMyNode
31 +
32   !! PUBLIC  Subroutines contained in MPI module
33    public :: mpi_bcast
34    public :: mpi_allreduce
# Line 50 | Line 56 | module mpiSimulation  
56   !! generic mpi error declaration.
57    integer,public  :: mpi_err
58  
59 +  
60 +
61   !! Include mpiComponentPlan type. mpiComponentPlan is a
62   !! dual header file for both C and Fortran.
63   #define __FORTRAN90
64   #include "mpiComponentPlan.h"
65  
66 +
67 +
68 + !! Tags used during force loop for parallel simulation
69 +  integer, allocatable, dimension(:) :: tagLocal
70 +  integer, public, allocatable, dimension(:) :: tagRow
71 +  integer, public, allocatable, dimension(:) :: tagColumn
72 +
73 + !! Logical set true if mpiSimulation has been initialized
74 +  logical :: isSimSet = .false.
75 +
76 +
77 +  type (mpiComponentPlan) :: mpiSim
78 +
79   !! gs_plan contains plans for gather and scatter routines
80    type, public :: gs_plan
81       private
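
The #include of "mpiComponentPlan.h" above is this module's only view of the plan structure, so the header's exact contents are not part of this diff. As a rough sketch only, assuming the Fortran branch of that dual header mirrors the fields printed by printComponentPlan later in this revision (the real header may declare more or different members):

    #ifdef __FORTRAN90
      type, public :: mpiComponentPlan
        sequence                            ! keep member order identical to the C struct
        integer :: nMolGlobal, nAtomsGlobal ! global molecule / atom counts
        integer :: myNode                   ! world rank of this process
        integer :: numberProcessors         ! size of mpi_comm_world
        integer :: rowComm, columnComm      ! communicators from mpi_comm_split
        integer :: numberRows, numberColumns
        integer :: nComponentsRow, nComponentsColumn
        integer :: rowIndex, columnIndex    ! this process's position in the grid
        integer :: myNlocal                 ! particles owned by this process
      end type mpiComponentPlan
    #endif

The sequence attribute, together with matching plain int fields on the C side, is what lets one header describe the struct passed across the C++/Fortran boundary.
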
# Line 97 | Line 118 | module mpiSimulation  
118    end interface
119  
120  
121 +
122   contains
123  
124   !! Sets up mpiComponentPlan with structure passed from C++.
125 <  subroutine setupSimParallel(thisComponentPlan,status)
125 >  subroutine setupSimParallel(thisComponentPlan,ntags,tags,status)
126   !  Passed Arguments
127   !    integer, intent(inout) :: nDim !! Number of dimensions
128      !! mpiComponentPlan struct from C
129 <    type (mpiComponentPlan), intent(inout) :: thisComponentPlan  
129 >    type (mpiComponentPlan), intent(inout) :: thisComponentPlan
130 > !! Number of tags passed, nlocal  
131 >    integer, intent(in) :: ntags
132 > !! Result status, 0 = normal, -1 = error
133      integer, intent(out) :: status
134 <    integer, intnet(out) :: localStatus
134 >    integer :: localStatus
135 > !! Global reference tag for local particles
136 >    integer, dimension(ntags),intent(inout) :: tags
137  
138 +
139      status = 0
140      if (componentPlanSet) then
141         return
142      endif
115
143      componentPlanSet = .true.
144 +    
145 + !! copy the C component plan to Fortran
146 +    mpiSim = thisComponentPlan
147 +    write(*,*) "Setting up simParallel"
148  
149 <    call make_Force_Grid(thisComponentPlan,localStatus)
149 >    call make_Force_Grid(mpiSim,localStatus)
150      if (localStatus /= 0) then
151 +       write(default_error,*) "Error creating force grid"
152         status = -1
153         return
154      endif
155  
156 <    call updateGridComponents(thisComponentPlan,localStatus)
156 >    call updateGridComponents(mpiSim,localStatus)
157      if (localStatus /= 0) then
158 +       write(default_error,*) "Error updating grid components"
159         status = -1
160         return
161      endif
162      
130    !! initialize gather and scatter plans used in this simulation
131    call plan_gather_scatter(1,thisComponentPlan%nComponentsRow,&
132         thisComponentPlan,row_comm,plan_row)
133    call plan_gather_scatter(nDim,thisComponentPlan%nComponentsRow,&
134         thisComponentPlan,row_comm,plan_row3d)
135    call plan_gather_scatter(1,thisComponentPlan%nComponentsColumn,&
136         thisComponentPlan,col_comm,plan_col)
137    call plan_gather_scatter(nDim,thisComponentPlan%nComponentsColumn,&
138         thisComponentPlan,col_comm,plan_col3d)
163  
164 +    !! initialize gather and scatter plans used in this simulation
165 +    call plan_gather_scatter(1,mpiSim%myNlocal,&
166 +         mpiSim,mpiSim%rowComm,plan_row)
167 +    call plan_gather_scatter(nDim,mpiSim%myNlocal,&
168 +         mpiSim,mpiSim%rowComm,plan_row3d)
169 +    call plan_gather_scatter(1,mpiSim%myNlocal,&
170 +         mpiSim,mpiSim%columnComm,plan_col)
171 +    call plan_gather_scatter(nDim,mpiSim%myNlocal,&
172 +         mpiSim,mpiSim%columnComm,plan_col3d)
173  
174 + !  Initialize tags    
175 +    call setTags(tags,localStatus)
176 +    if (localStatus /= 0) then
177 +       status = -1
178 +       return
179 +    endif
180 +    isSimSet = .true.
181  
182 + !    call printComponentPlan(mpiSim,0)
183    end subroutine setupSimParallel
184  
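
For orientation, a hypothetical Fortran-side call of the new four-argument setupSimParallel interface; myPlan, nlocal, and the identity tags are illustrative stand-ins (in the real code the plan is populated from C++ and the tags come from the simulation setup):

    type (mpiComponentPlan) :: myPlan     ! would be filled in from the C++ side
    integer, parameter :: nlocal = 100    ! illustrative local particle count
    integer :: tags(nlocal), status, i

    do i = 1, nlocal
       tags(i) = i                        ! hypothetical global reference tags
    end do
    call setupSimParallel(myPlan, nlocal, tags, status)
    if (status /= 0) write(*,*) "setupSimParallel failed"
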
185    subroutine replanSimParallel(thisComponentPlan,status)
# Line 146 | Line 187 | contains
187      !! mpiComponentPlan struct from C
188      type (mpiComponentPlan), intent(inout) :: thisComponentPlan  
189      integer, intent(out) :: status
190 <    integer, intnet(out) :: localStatus
191 <
190 >    integer :: localStatus
191 >    integer :: mpierror
192      status = 0
193  
194      call updateGridComponents(thisComponentPlan,localStatus)
# Line 164 | Line 205 | contains
205  
206  
207      !! initialize gather and scatter plans used in this simulation
208 <    call plan_gather_scatter(1,thisComponentPlan%nComponentsRow,&
209 <         thisComponentPlan,row_comm,plan_row)
210 <    call plan_gather_scatter(nDim,thisComponentPlan%nComponentsRow,&
211 <         thisComponentPlan,row_comm,plan_row3d)
212 <    call plan_gather_scatter(1,thisComponentPlan%nComponentsColumn,&
213 <         thisComponentPlan,col_comm,plan_col)
214 <    call plan_gather_scatter(nDim,thisComponentPlan%nComponentsColumn,&
215 <         thisComponentPlan,col_comm,plan_col3d)
208 >    call plan_gather_scatter(1,thisComponentPlan%myNlocal,&
209 >         thisComponentPlan,thisComponentPlan%rowComm,plan_row)
210 >    call plan_gather_scatter(nDim,thisComponentPlan%myNlocal,&
211 >         thisComponentPlan,thisComponentPlan%rowComm,plan_row3d)
212 >    call plan_gather_scatter(1,thisComponentPlan%myNlocal,&
213 >         thisComponentPlan,thisComponentPlan%columnComm,plan_col)
214 >    call plan_gather_scatter(nDim,thisComponentPlan%myNlocal,&
215 >         thisComponentPlan,thisComponentPlan%columnComm,plan_col3d)
216  
217  
218  
# Line 187 | Line 228 | contains
228      integer, intent(out) :: status
229      integer :: nComponentsLocal
230      integer :: nComponentsRow = 0
231 <    integer :: nComponensColumn = 0
231 >    integer :: nComponentsColumn = 0
232      integer :: mpiErrors
233  
234      status = 0
# Line 201 | Line 242 | contains
242      nComponentsLocal = thisComponentPlan%myNlocal
243  
244      call mpi_allreduce(nComponentsLocal,nComponentsRow,1,mpi_integer,&
245 <         mpi_sum,thisComponentPlan%rowComm,mpiError)
245 >         mpi_sum,thisComponentPlan%rowComm,mpiErrors)
246      if (mpiErrors /= 0) then
247         status = -1
248         return
249      endif
250  
251      call mpi_allreduce(nComponentsLocal,nComponentsColumn,1,mpi_integer, &
252 <         mpi_sum,thisComponentPlan%columnComm,mpiError)    
252 >         mpi_sum,thisComponentPlan%columnComm,mpiErrors)    
253      if (mpiErrors /= 0) then
254         status = -1
255         return
# Line 241 | Line 282 | contains
282  
283      if (.not. ComponentPlanSet) return
284      status = 0
285 <
285 >  
286   !! We make a dangerous assumption here that if numberProcessors is
287   !! zero, then we need to get the information from MPI.
288      if (thisComponentPlan%numberProcessors == 0 ) then
# Line 258 | Line 299 | contains
299  
300      else
301         nWorldProcessors = thisComponentPlan%numberProcessors
302 <       myWorldRank = thisComponentPlan%myRank
302 >       myWorldRank = thisComponentPlan%myNode
303      endif
304  
305  
# Line 273 | Line 314 | contains
314      nRows = nWorldProcessors/nColumns
315  
316      rowIndex = myWorldRank/nColumns
317 <    call mpi_comm_split(mpi_comm_world,rowIndex,0,rowCommunicator,mpiError)
317 >
318 >
319 >
320 >    call mpi_comm_split(mpi_comm_world,rowIndex,0,rowCommunicator,mpiErrors)
321      if ( mpiErrors /= 0 ) then
322 +       write(default_error,*) "MPI comm split failed at row communicator"
323         status = -1
324         return
325      endif
326  
327      columnIndex = mod(myWorldRank,nColumns)
328 <    call mpi_comm_split(mpi_comm_world,columnIndex,0,columnCommunicator,mpiError)
328 >    call mpi_comm_split(mpi_comm_world,columnIndex,0,columnCommunicator,mpiErrors)
329      if ( mpiErrors /= 0 ) then
330 +       write(default_error,*) "MPI comm split failed at columnCommunicator"
331         status = -1
332         return
333      endif
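
The arithmetic above is the usual near-square processor grid: ranks are laid out row-major over nColumns columns, and the row and column indices double as the color arguments to mpi_comm_split. A minimal standalone sketch, assuming (since the computation of nColumns falls outside this diff's context window) that it is chosen near sqrt(nWorldProcessors):

    program grid_demo
      implicit none
      integer :: nWorldProcessors, nColumns, nRows
      integer :: myWorldRank, rowIndex, columnIndex

      nWorldProcessors = 6                          ! illustrative world size
      nColumns = int(sqrt(real(nWorldProcessors)))  ! assumed choice; not shown in diff
      nRows = nWorldProcessors/nColumns

      do myWorldRank = 0, nWorldProcessors - 1
         rowIndex    = myWorldRank/nColumns         ! color for the row communicator
         columnIndex = mod(myWorldRank, nColumns)   ! color for the column communicator
         write(*,'(3i4)') myWorldRank, rowIndex, columnIndex
      end do
    end program grid_demo

With 6 processes and nColumns = 2, ranks 0-1 share row 0 and ranks 2-3 share row 1, while even and odd ranks form the two columns.
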
# Line 313 | Line 359 | contains
359      integer :: i,junk
360  
361      if (present(status)) status = 0
362 +    
363    
364  
365   !! Set gsComponentPlan pointer
# Line 320 | Line 367 | contains
367   !! WARNING this could be dangerous since thisComponentPlan was originally
368   !! allocated in C++ and there is a significant difference between C and
369   !! F95 pointers....  
370 <    gsComponentPlan => thisComponetPlan
370 >    this_plan%gsComponentPlan => thisComponentPlan
371  
372   ! Set this plan size for displs array.
373      this_plan%gsPlanSize = nDim * nComponents
# Line 359 | Line 406 | contains
406      end if
407  
408    !! gather all the local sizes into an array sized by the number of processors.
409 <    call mpi_allgather(gs_plan%gsPlanSize,1,mpi_integer,this_plan%counts, &
410 <         1,mpi_integer,comm,mpi_err)
409 >    call mpi_allgather(this_plan%gsPlanSize,1,mpi_integer,this_plan%counts, &
410 >         1,mpi_integer,thisComm,mpi_err)
411  
412      if (mpi_err /= 0) then
413         if (present(status)) status = -1
# Line 378 | Line 425 | contains
425         this_plan%displs(i) = this_plan%displs(i-1) + this_plan%counts(i-1)
426      end do
427  
428 +
429    end subroutine plan_gather_scatter
430  
431  
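
The counts/displs bookkeeping at the end of plan_gather_scatter is an exclusive prefix sum: each plan's block in the gathered array starts where the previous blocks end. For example, counts = (3, 5, 2) gives displs = (0, 3, 8). A minimal sketch of just that step:

    integer, parameter :: nprocs = 3
    integer :: counts(0:nprocs-1), displs(0:nprocs-1), i

    counts = (/ 3, 5, 2 /)    ! per-rank block sizes, as gathered by mpi_allgather
    displs(0) = 0
    do i = 1, nprocs - 1
       displs(i) = displs(i-1) + counts(i-1)   ! block i starts after blocks 0..i-1
    end do
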
# Line 386 | Line 434 | contains
434      
435      
436      this_plan%gsComponentPlan => null()
437 <    call mpi_comm_free(this_plan%comm,mpi_err)
437 >    call mpi_comm_free(this_plan%myPlanComm,mpi_err)
438  
439      deallocate(this_plan%counts)
440      deallocate(this_plan%displs)
# Line 400 | Line 448 | contains
448      integer, dimension(:), intent(inout) :: rbuffer
449      integer :: noffset
450      integer, intent(out), optional :: status
451 +    integer :: i
452  
453 +
454 +    
455      if (present(status)) status = 0
456      noffset = this_plan%displs(this_plan%myPlanRank)
457  
458 + !    if (getmyNode() == 1) then
459 + !       write(*,*) "Node 0 printing allgatherv vars"
460 + !       write(*,*) "Noffset: ", noffset
461 + !       write(*,*) "PlanSize: ", this_plan%gsPlanSize
462 + !       write(*,*) "PlanComm: ", this_plan%myPlanComm
463 + !    end if
464 +
465      call mpi_allgatherv(sbuffer,this_plan%gsPlanSize, mpi_integer, &
466           rbuffer,this_plan%counts,this_plan%displs,mpi_integer, &
467           this_plan%myPlanComm, mpi_err)
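
A hypothetical use of the generic gather interface built on this routine, assuming a double-precision kind dp from the definitions module and arrays sized to match the plan (q, qRow, nlocal, and nrow are illustrative names and sizes):

    integer, parameter :: nlocal = 100, nrow = 300   ! illustrative sizes
    real(kind=dp) :: q(3, nlocal)    ! this process's particle positions
    real(kind=dp) :: qRow(3, nrow)   ! positions of every particle in the row
    integer :: status

    call gather(q, qRow, plan_row3d, status)   ! allgatherv over the row communicator
    if (status /= 0) write(*,*) "row gather failed"

Because mpi_allgatherv is collective over this_plan%myPlanComm, every process in the row ends up with the same concatenated qRow.
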
# Line 422 | Line 480 | contains
480      integer :: noffset
481      integer, intent(out), optional :: status
482  
483 +
484      if (present(status)) status = 0
485 <    noffset = this_plan%displs(this_plan%me)
485 >    noffset = this_plan%displs(this_plan%myPlanRank)
486  
487      call mpi_allgatherv(sbuffer,this_plan%gsPlanSize, mpi_double_precision, &
488           rbuffer,this_plan%counts,this_plan%displs,mpi_double_precision, &
# Line 496 | Line 555 | contains
555  
556    end subroutine scatter_double_2d
557  
558 +
559 +  subroutine setTags(tags,status)
560 +    integer, dimension(:) :: tags
561 +    integer :: status
562 +
563 +    integer :: alloc_stat
564 +    
565 +    integer :: ncol
566 +    integer :: nrow
567 +
568 +    status = 0
569 + ! allocate row arrays
570 +    nrow = getNrow(plan_row)
571 +    ncol = getNcol(plan_col)
572 +
573 +    if (.not. allocated(tagRow)) then
574 +       allocate(tagRow(nrow),STAT=alloc_stat)
575 +       if (alloc_stat /= 0 ) then
576 +          status = -1
577 +          return
578 +       endif
579 +    else
580 +       deallocate(tagRow)
581 +       allocate(tagRow(nrow),STAT=alloc_stat)
582 +       if (alloc_stat /= 0 ) then
583 +          status = -1
584 +          return
585 +       endif
586 +
587 +    endif
588 + ! allocate column arrays
589 +    if (.not. allocated(tagColumn)) then
590 +       allocate(tagColumn(ncol),STAT=alloc_stat)
591 +       if (alloc_stat /= 0 ) then
592 +          status = -1
593 +          return
594 +       endif
595 +    else
596 +       deallocate(tagColumn)
597 +       allocate(tagColumn(ncol),STAT=alloc_stat)
598 +       if (alloc_stat /= 0 ) then
599 +          status = -1
600 +          return
601 +       endif
602 +    endif
603 +    
604 +    call gather(tags,tagRow,plan_row)
605 +    call gather(tags,tagColumn,plan_col)
606 +
607 +
608 +  end subroutine setTags
609 +
610 +  pure function getNcol(thisplan) result(ncol)
611 +    type (gs_plan), intent(in) :: thisplan
612 +    integer :: ncol
613 +    ncol = thisplan%gsComponentPlan%nComponentsColumn
614 +  end function getNcol
615 +
616 +  pure function getNrow(thisplan) result(nrow)
617 +    type (gs_plan), intent(in) :: thisplan
618 +    integer :: nrow
619 +    nrow = thisplan%gsComponentPlan%nComponentsRow
620 +  end function getNrow
621 +
622 +  function isMPISimSet() result(isthisSimSet)
623 +    logical :: isthisSimSet
624 +    if (isSimSet) then
625 +       isthisSimSet = .true.
626 +    else
627 +       isthisSimSet = .false.
628 +    endif
629 +  end function isMPISimSet
630    
631 < #endif
631 >
632 >
633 >  subroutine printComponentPlan(this_plan,printNode)
634 >
635 >    type (mpiComponentPlan), intent(in) :: this_plan
636 >    integer, optional :: printNode
637 >    logical :: print_me = .false.
638 >
639 >    if (present(printNode)) then
640 >       if (printNode == mpiSim%myNode) print_me = .true.
641 >    else
642 >       print_me = .true.
643 >    endif
644 >
645 >    if (print_me) then
646 >       write(default_error,*) "SetupSimParallel: writing component plan"
647 >      
648 >       write(default_error,*) "nMolGlobal: ", mpiSim%nMolGlobal
649 >       write(default_error,*) "nAtomsGlobal: ", mpiSim%nAtomsGlobal
650 >       write(default_error,*) "nBondsGlobal: ", mpiSim%nBondsGlobal
651 >       write(default_error,*) "nTorsionsGlobal: ", mpiSim%nTorsionsGlobal
652 >       write(default_error,*) "nSRIGlobal: ", mpiSim%nSRIGlobal
653 >       write(default_error,*) "myMolStart: ", mpiSim%myMolStart
654 >       write(default_error,*) "myMolEnd: ", mpiSim%myMolEnd
655 >       write(default_error,*) "myMol: ", mpiSim%myMol
656 >       write(default_error,*) "myNlocal: ", mpiSim%myNlocal
657 >       write(default_error,*) "myNode: ", mpiSim%myNode
658 >       write(default_error,*) "numberProcessors: ", mpiSim%numberProcessors
659 >       write(default_error,*) "rowComm: ", mpiSim%rowComm
660 >       write(default_error,*) "columnComm: ", mpiSim%columnComm
661 >       write(default_error,*) "numberRows: ", mpiSim%numberRows
662 >       write(default_error,*) "numberColumns: ", mpiSim%numberColumns
663 >       write(default_error,*) "nComponentsRow: ", mpiSim%nComponentsRow
664 >       write(default_error,*) "nComponentsColumn: ", mpiSim%nComponentsColumn
665 >       write(default_error,*) "rowIndex: ", mpiSim%rowIndex
666 >       write(default_error,*) "columnIndex: ", mpiSim%columnIndex
667 >    endif
668 >  end subroutine printComponentPlan
669 >
670 >  function getMyNode() result(myNode)
671 >    integer :: myNode
672 >    myNode = mpiSim%myNode
673 >  end function getMyNode
674 >
675 >
676   end module mpiSimulation
677  
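
The tagRow/tagColumn arrays filled by setTags give every process the global tags of all particles along its row and column, which is what a row-by-column force loop needs in order to identify pairs. A hypothetical fragment of such a loop (the actual force loop lives outside this file):

    integer :: i, j, nrow, ncol

    nrow = getNrow(plan_row)
    ncol = getNcol(plan_col)
    do j = 1, ncol
       do i = 1, nrow
          if (tagRow(i) == tagColumn(j)) cycle   ! same particle seen from row and column
          ! the pair interaction between row particle i and column particle j
          ! would be evaluated here
       end do
    end do
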

Diff Legend

  Removed lines (old line number only)
+ Added lines
< Changed lines (old revision)
> Changed lines (new revision)