  7    !!
  8    !! @author Charles F. Vardeman II
  9    !! @author Matthew Meineke
 10 <  !! @version $Id: mpiSimulation_module.F90,v 1.13 2004-05-27 00:48:12 tim Exp $, $Date: 2004-05-27 00:48:12 $, $Name: not supported by cvs2svn $, $Revision: 1.13 $
 10 >  !! @version $Id: mpiSimulation_module.F90,v 1.15 2004-06-01 18:42:58 gezelter Exp $, $Date: 2004-06-01 18:42:58 $, $Name: not supported by cvs2svn $, $Revision: 1.15 $
 11
 12    module mpiSimulation
 13    use definitions
139
140    contains
141
142 <  !! Sets up mpiComponentPlan with structure passed from C++.
143 <  subroutine setupSimParallel(thisComponentPlan,nAtomTags,atomTags,status)
144 <  ! Passed Arguments
142 >  !! Sets up mpiComponentPlan with structure passed from C++.
143 >  subroutine setupSimParallel(thisComponentPlan, nAtomTags, atomTags, &
144 >  nGroupTags, groupTags, status)
145 >  !! Passed Arguments
146    !! mpiComponentPlan struct from C
147    type (mpiComponentPlan), intent(inout) :: thisComponentPlan
148 <  !! Number of tags passed, nlocal
149 <  integer, intent(in) :: nAtomTags
150 <  !! Result status, 0 = normal, -1 = error
148 >  !! Number of tags passed
149 >  integer, intent(in) :: nAtomTags, nGroupTags
150 >  !! Result status, 0 = normal, -1 = error
151    integer, intent(out) :: status
152    integer :: localStatus
153 <  !! Global reference tag for local particles
154 <  integer, dimension(nAtomTags),intent(inout) :: atomTags
153 >  !! Global reference tag for local particles
154 >  integer, dimension(nAtomTags), intent(inout) :: atomTags
155 >  integer, dimension(nGroupTags), intent(inout) :: groupTags
156
157 <  write(*,*) 'mpiSim_mod thinks node', thisComponentPlan%myNode, &
158 <  ' has atomTags(1) = ', atomTags(1)
159 <
157 >  !write(*,*) 'mpiSim_mod thinks node', thisComponentPlan%myNode, &
158 >  ! ' has atomTags(1) = ', atomTags(1)
159 >
160    status = 0
161    if (componentPlanSet) then
162    return
163    endif
164    componentPlanSet = .true.
165
166 <  !! copy c component plan to fortran
166 >  !! copy c component plan to fortran
167    mpiSim = thisComponentPlan
168 <  write(*,*) "Seting up simParallel"
169 <
168 >  !write(*,*) "Seting up simParallel"
169 >
170    call make_Force_Grid(mpiSim, localStatus)
171    if (localStatus /= 0) then
172    write(default_error,*) "Error creating force grid"
173    status = -1
174    return
175    endif
176 <
176 >
177    call updateGridComponents(mpiSim, localStatus)
178    if (localStatus /= 0) then
179    write(default_error,*) "Error updating grid components"
180    status = -1
181    return
182 <  endif
182 >  endif
183
184    !! initialize gather and scatter plans used in this simulation
185    call plan_gather_scatter(1, mpiSim%nAtomsLocal, &
211    status = -1
212    return
213    endif
214 +
215 +
216 +  call setGroupTags(groupTags,localStatus)
217 +  if (localStatus /= 0) then
218 +  status = -1
219 +  return
220 +  endif
221 +
222    isSimSet = .true.
223
224    ! call printComponentPlan(mpiSim,0)
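Note: the revised setupSimParallel interface above now takes the cutoff-group tags alongside the atom tags. In practice this routine is driven from the C++ side (the mpiComponentPlan struct is passed in from C++), but purely as an illustration, a minimal Fortran driver for the new signature could look like the sketch below. It is a sketch only: it assumes the mpiComponentPlan type and its myNode, nProcessors, nAtomsLocal, and nGroupsLocal fields (the ones visible in this diff) are accessible from the module, assumes an MPI Fortran binding is available via "use mpi", and the tag values are made up.

    program testSetupSimParallel
       use mpi
       use mpiSimulation
       implicit none

       type (mpiComponentPlan) :: plan
       integer, parameter :: nAtoms = 4, nGroups = 2
       integer, dimension(nAtoms)  :: atomTags
       integer, dimension(nGroups) :: groupTags
       integer :: status, ierr, i

       call mpi_init(ierr)
       call mpi_comm_rank(MPI_COMM_WORLD, plan%myNode, ierr)
       call mpi_comm_size(MPI_COMM_WORLD, plan%nProcessors, ierr)
       plan%nAtomsLocal  = nAtoms
       plan%nGroupsLocal = nGroups

       ! made-up global reference tags for this node's atoms and cutoff groups
       atomTags  = (/ (plan%myNode*nAtoms  + i, i = 1, nAtoms)  /)
       groupTags = (/ (plan%myNode*nGroups + i, i = 1, nGroups) /)

       call setupSimParallel(plan, nAtoms, atomTags, nGroups, groupTags, status)
       if (status /= 0) write(*,*) 'setupSimParallel failed on node ', plan%myNode

       call mpi_finalize(ierr)
    end program testSetupSimParallel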
277
278    end subroutine replanSimParallel
279
280 <  !! Updates number of row and column components for long range forces.
281 <  subroutine updateGridComponents(thisComponentPlan,status)
280 >  !! Updates number of row and column components for long range forces.
281 >  subroutine updateGridComponents(thisComponentPlan, status)
282    type (mpiComponentPlan) :: thisComponentPlan !! mpiComponentPlan
283 <
284 <  !! Status return
285 <  !! - 0 Success
286 <  !! - -1 Failure
283 >
284 >  !! Status return
285 >  !! - 0 Success
286 >  !! - -1 Failure
287    integer, intent(out) :: status
288    integer :: nAtomsLocal
289    integer :: nAtomsInRow = 0
301    return
302    endif
303    if (thisComponentPlan%nGroupsLocal == 0) then
304 +  write(*,*) 'tcp%ngl = ', thisComponentPlan%nGroupsLocal
305    status = -1
306    return
307    endif
345    end subroutine updateGridComponents
346
347
348 <  !! Creates a square force decomposition of processors into row and column
349 <  !! communicators.
348 >  !! Creates a square force decomposition of processors into row and column
349 >  !! communicators.
350    subroutine make_Force_Grid(thisComponentPlan,status)
351    type (mpiComponentPlan) :: thisComponentPlan
352    integer, intent(out) :: status !! status returns -1 if error
353 <  integer :: nColumnsMax !! Maximum number of columns
354 <  integer :: nWorldProcessors !! Total number of processors in World comm.
353 >  integer :: nColumnsMax !! Maximum number of columns
354 >  integer :: nWorldProcessors !! Total number of processors in World comm.
355    integer :: rowIndex !! Row for this processor.
356    integer :: columnIndex !! Column for this processor.
357    integer :: nRows !! Total number of rows.
366    if (.not. ComponentPlanSet) return
367    status = 0
368
369 <  !! We make a dangerous assumption here that if numberProcessors is
370 <  !! zero, then we need to get the information from MPI.
369 >  !! We make a dangerous assumption here that if numberProcessors is
370 >  !! zero, then we need to get the information from MPI.
371    if (thisComponentPlan%nProcessors == 0 ) then
372    call mpi_comm_size( MPI_COMM_WORLD, nWorldProcessors,mpiErrors)
373    if ( mpiErrors /= 0 ) then
379    status = -1
380    return
381    endif
382 <
382 >
383    else
384    nWorldProcessors = thisComponentPlan%nProcessors
385    myWorldRank = thisComponentPlan%myNode
386    endif
387 <
387 >
388    nColumnsMax = nint(sqrt(real(nWorldProcessors,kind=dp)))
389 <
389 >
390    do i = 1, nColumnsMax
391    if (mod(nWorldProcessors,i) == 0) nColumns = i
392    end do
393 <
393 >
394    nRows = nWorldProcessors/nColumns
395 <
395 >
396    rowIndex = myWorldRank/nColumns
397 <
397 >
398    call mpi_comm_split(mpi_comm_world,rowIndex,0,rowCommunicator,mpiErrors)
399    if ( mpiErrors /= 0 ) then
400    write(default_error,*) "MPI comm split failed at row communicator"
401    status = -1
402    return
403    endif
404 <
404 >
405    columnIndex = mod(myWorldRank,nColumns)
406    call mpi_comm_split(mpi_comm_world,columnIndex,0,columnCommunicator,mpiErrors)
407    if ( mpiErrors /= 0 ) then
409    status = -1
410    return
411    endif
412 <
413 <  ! Set appropriate components of thisComponentPlan
412 >
413 >  ! Set appropriate components of thisComponentPlan
414    thisComponentPlan%rowComm = rowCommunicator
415    thisComponentPlan%columnComm = columnCommunicator
416    thisComponentPlan%rowIndex = rowIndex
419    thisComponentPlan%nColumns = nColumns
420
421    end subroutine make_Force_Grid
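Note: the decomposition above is plain integer arithmetic before the two mpi_comm_split calls: nColumns is the largest divisor of the world size that does not exceed nint(sqrt(P)), nRows = P/nColumns, and each rank lands at row rank/nColumns, column mod(rank,nColumns). A self-contained sketch of just that arithmetic (no MPI, hypothetical world size of 6, giving 2 columns by 3 rows):

    program force_grid_demo
       implicit none
       integer, parameter :: dp = selected_real_kind(15)
       integer :: nWorldProcessors, nColumnsMax, nColumns, nRows, i, rank

       nWorldProcessors = 6              ! hypothetical world size

       ! largest divisor of the processor count not exceeding nint(sqrt(P))
       nColumnsMax = nint(sqrt(real(nWorldProcessors, kind=dp)))
       nColumns = 1
       do i = 1, nColumnsMax
          if (mod(nWorldProcessors, i) == 0) nColumns = i
       end do
       nRows = nWorldProcessors / nColumns    ! 6 procs -> nColumns = 2, nRows = 3

       do rank = 0, nWorldProcessors - 1
          write(*,*) 'rank ', rank, ' -> row ', rank/nColumns, &
                     ', column ', mod(rank, nColumns)
       end do
    end program force_grid_demo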
422 <
422 >
423    !! initalizes a gather scatter plan
424    subroutine plan_gather_scatter( nDim, nObjects, thisComponentPlan, &
425    thisComm, this_plan, status)
717    call gather(tags, AtomColToGlobal, plan_atom_col)
718
719    end subroutine setAtomTags
720 +
721 +  subroutine setGroupTags(tags, status)
722 +  integer, dimension(:) :: tags
723 +  integer :: status
724 +
725 +  integer :: alloc_stat
726 +
727 +  integer :: nGroupsInCol
728 +  integer :: nGroupsInRow
729 +
730 +  status = 0
731 +
732 +  nGroupsInRow = getNgroupsInRow(plan_group_row)
733 +  nGroupsInCol = getNgroupsInCol(plan_group_col)
734 +
735 +  if(allocated(GroupLocalToGlobal)) then
736 +  deallocate(GroupLocalToGlobal)
737 +  endif
738 +  allocate(GroupLocalToGlobal(size(tags)),STAT=alloc_stat)
739 +  if (alloc_stat /= 0 ) then
740 +  status = -1
741 +  return
742 +  endif
743 +
744 +  GroupLocalToGlobal = tags
745 +
746 +  if(allocated(GroupRowToGlobal)) then
747 +  deallocate(GroupRowToGlobal)
748 +  endif
749 +  allocate(GroupRowToGlobal(nGroupsInRow),STAT=alloc_stat)
750 +  if (alloc_stat /= 0 ) then
751 +  status = -1
752 +  return
753 +  endif
754 +
755 +  if(allocated(GroupColToGlobal)) then
756 +  deallocate(GroupColToGlobal)
757 +  endif
758 +  allocate(GroupColToGlobal(nGroupsInCol),STAT=alloc_stat)
759 +  if (alloc_stat /= 0 ) then
760 +  status = -1
761 +  return
762 +  endif
763 +
764 +  call gather(tags, GroupRowToGlobal, plan_group_row)
765 +  call gather(tags, GroupColToGlobal, plan_group_col)
766 +
767 +  end subroutine setGroupTags
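Note: the new setGroupTags mirrors setAtomTags: it resizes the three group index arrays (local, row, and column) and then gathers the local group tags across the row and column plans, so that GroupRowToGlobal and GroupColToGlobal end up holding the global tags of every group in this processor's row and column. The gathers need the gs_plan machinery, but the allocate-on-demand idiom used for each array can be shown on its own; the size of 8 below is made up (in the module it comes from getNgroupsInRow):

    program alloc_pattern_demo
       implicit none
       integer, allocatable, dimension(:) :: GroupRowToGlobal
       integer :: alloc_stat, nGroupsInRow, status

       status = 0
       nGroupsInRow = 8    ! made up; the module uses getNgroupsInRow(plan_group_row)

       ! drop any stale array from a previous plan, then allocate to the new size
       if (allocated(GroupRowToGlobal)) deallocate(GroupRowToGlobal)
       allocate(GroupRowToGlobal(nGroupsInRow), STAT=alloc_stat)
       if (alloc_stat /= 0) status = -1

       write(*,*) 'status = ', status, ', size = ', size(GroupRowToGlobal)
    end program alloc_pattern_demo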
768
769    function getNatomsInCol(thisplan) result(nInCol)
770    type (gs_plan), intent(in) :: thisplan