1 |
|
/* |
2 |
< |
* Copyright (c) 2005 The University of Notre Dame. All Rights Reserved. |
2 |
> |
 * Copyright (c) 2005 The University of Notre Dame. All Rights Reserved.
3 |
|
* |
4 |
|
* The University of Notre Dame grants you ("Licensee") a |
5 |
|
* non-exclusive, royalty free, license to use, modify and |
100 |
|
#ifdef IS_MPI |
101 |
|
int streamSize; |
102 |
|
const int masterNode = 0; |
103 |
< |
int commStatus; |
103 |
> |
|
104 |
|
if (worldRank == masterNode) { |
105 |
< |
commStatus = MPI_Bcast(&mdFileVersion, 1, MPI_INT, masterNode, MPI_COMM_WORLD); |
105 |
> |
MPI::COMM_WORLD.Bcast(&mdFileVersion, 1, MPI::INT, masterNode); |
106 |
|
#endif |
107 |
|
SimplePreprocessor preprocessor; |
108 |
< |
preprocessor.preprocess(rawMetaDataStream, filename, startOfMetaDataBlock, ppStream); |
108 |
> |
preprocessor.preprocess(rawMetaDataStream, filename, startOfMetaDataBlock, |
109 |
> |
ppStream); |
110 |
|
|
111 |
|
#ifdef IS_MPI |
112 |
|
    //broadcasting the stream size
113 |
|
streamSize = ppStream.str().size() +1; |
114 |
< |
commStatus = MPI_Bcast(&streamSize, 1, MPI_LONG, masterNode, MPI_COMM_WORLD); |
115 |
< |
|
116 |
< |
commStatus = MPI_Bcast(static_cast<void*>(const_cast<char*>(ppStream.str().c_str())), streamSize, MPI_CHAR, masterNode, MPI_COMM_WORLD); |
116 |
< |
|
114 |
> |
MPI::COMM_WORLD.Bcast(&streamSize, 1, MPI::LONG, masterNode); |
115 |
> |
MPI::COMM_WORLD.Bcast(static_cast<void*>(const_cast<char*>(ppStream.str().c_str())), |
116 |
> |
streamSize, MPI::CHAR, masterNode); |
117 |
|
|
118 |
|
} else { |
119 |
|
|
120 |
< |
commStatus = MPI_Bcast(&mdFileVersion, 1, MPI_INT, masterNode, MPI_COMM_WORLD); |
120 |
> |
MPI::COMM_WORLD.Bcast(&mdFileVersion, 1, MPI::INT, masterNode); |
121 |
|
|
122 |
|
//get stream size |
123 |
< |
commStatus = MPI_Bcast(&streamSize, 1, MPI_LONG, masterNode, MPI_COMM_WORLD); |
123 |
> |
MPI::COMM_WORLD.Bcast(&streamSize, 1, MPI::LONG, masterNode); |
124 |
|
|
125 |
|
char* buf = new char[streamSize]; |
126 |
|
assert(buf); |
127 |
|
|
128 |
|
//receive file content |
129 |
< |
commStatus = MPI_Bcast(buf, streamSize, MPI_CHAR, masterNode, MPI_COMM_WORLD); |
129 |
> |
MPI::COMM_WORLD.Bcast(buf, streamSize, MPI::CHAR, masterNode); |
130 |
|
|
131 |
|
ppStream.str(buf); |
132 |
|
delete [] buf; |
499 |
|
int nTarget; |
500 |
|
int done; |
501 |
|
int i; |
502 |
– |
int j; |
502 |
|
int loops; |
503 |
|
int which_proc; |
504 |
|
int nProcessors; |
506 |
|
int nGlobalMols = info->getNGlobalMolecules(); |
507 |
|
std::vector<int> molToProcMap(nGlobalMols, -1); // default to an error condition: |
508 |
|
|
509 |
< |
MPI_Comm_size(MPI_COMM_WORLD, &nProcessors); |
509 |
> |
nProcessors = MPI::COMM_WORLD.Get_size(); |
510 |
|
|
511 |
|
if (nProcessors > nGlobalMols) { |
512 |
|
sprintf(painCave.errMsg, |
544 |
|
nTarget = (int)(precast + 0.5); |
545 |
|
|
546 |
|
for(i = 0; i < nGlobalMols; i++) { |
547 |
+ |
|
548 |
|
done = 0; |
549 |
|
loops = 0; |
550 |
|
|
569 |
|
// and be done with it. |
570 |
|
|
571 |
|
if (loops > 100) { |
572 |
+ |
|
573 |
|
sprintf(painCave.errMsg, |
574 |
< |
"I've tried 100 times to assign molecule %d to a " |
575 |
< |
" processor, but can't find a good spot.\n" |
576 |
< |
"I'm assigning it at random to processor %d.\n", |
577 |
< |
i, which_proc); |
578 |
< |
|
574 |
> |
"There have been 100 attempts to assign molecule %d to an\n" |
575 |
> |
"\tunderworked processor, but there's no good place to\n" |
576 |
> |
"\tleave it. OpenMD is assigning it at random to processor %d.\n", |
577 |
> |
i, which_proc); |
578 |
> |
|
579 |
|
painCave.isFatal = 0; |
580 |
+ |
painCave.severity = OPENMD_INFO; |
581 |
|
simError(); |
582 |
|
|
583 |
|
molToProcMap[i] = which_proc; |
622 |
|
} |
623 |
|
|
624 |
|
delete myRandom; |
625 |
< |
|
625 |
> |
|
626 |
|
// Spray out this nonsense to all other processors: |
627 |
< |
|
626 |
< |
MPI_Bcast(&molToProcMap[0], nGlobalMols, MPI_INT, 0, MPI_COMM_WORLD); |
627 |
> |
MPI::COMM_WORLD.Bcast(&molToProcMap[0], nGlobalMols, MPI::INT, 0); |
628 |
|
} else { |
629 |
|
|
630 |
|
// Listen to your marching orders from processor 0: |
631 |
< |
|
632 |
< |
MPI_Bcast(&molToProcMap[0], nGlobalMols, MPI_INT, 0, MPI_COMM_WORLD); |
631 |
> |
MPI::COMM_WORLD.Bcast(&molToProcMap[0], nGlobalMols, MPI::INT, 0); |
632 |
> |
|
633 |
|
} |
634 |
|
|
635 |
|
info->setMolToProcMap(molToProcMap); |
853 |
|
// This would be prettier if we could use MPI_IN_PLACE like the MPI-2 |
854 |
|
// docs said we could. |
855 |
|
std::vector<int> tmpGroupMembership(info->getNGlobalAtoms(), 0); |
856 |
< |
MPI_Allreduce(&globalGroupMembership[0], &tmpGroupMembership[0], nGlobalAtoms, |
857 |
< |
MPI_INT, MPI_SUM, MPI_COMM_WORLD); |
856 |
> |
MPI::COMM_WORLD.Allreduce(&globalGroupMembership[0], |
857 |
> |
&tmpGroupMembership[0], nGlobalAtoms, |
858 |
> |
MPI::INT, MPI::SUM); |
859 |
|
info->setGlobalGroupMembership(tmpGroupMembership); |
860 |
|
#else |
861 |
|
info->setGlobalGroupMembership(globalGroupMembership); |
872 |
|
|
873 |
|
#ifdef IS_MPI |
874 |
|
std::vector<int> tmpMolMembership(info->getNGlobalAtoms(), 0); |
875 |
+ |
MPI::COMM_WORLD.Allreduce(&globalMolMembership[0], &tmpMolMembership[0], |
876 |
+ |
nGlobalAtoms, |
877 |
+ |
MPI::INT, MPI::SUM); |
878 |
|
|
874 |
– |
MPI_Allreduce(&globalMolMembership[0], &tmpMolMembership[0], nGlobalAtoms, |
875 |
– |
MPI_INT, MPI_SUM, MPI_COMM_WORLD); |
876 |
– |
|
879 |
|
info->setGlobalMolMembership(tmpMolMembership); |
880 |
|
#else |
881 |
|
info->setGlobalMolMembership(globalMolMembership); |
891 |
|
|
892 |
|
#ifdef IS_MPI |
893 |
|
std::vector<int> numIntegrableObjectsPerMol(info->getNGlobalMolecules(), 0); |
894 |
< |
MPI_Allreduce(&nIOPerMol[0], &numIntegrableObjectsPerMol[0], |
895 |
< |
info->getNGlobalMolecules(), MPI_INT, MPI_SUM, MPI_COMM_WORLD); |
894 |
> |
MPI::COMM_WORLD.Allreduce(&nIOPerMol[0], &numIntegrableObjectsPerMol[0], |
895 |
> |
info->getNGlobalMolecules(), MPI::INT, MPI::SUM); |
896 |
|
#else |
897 |
|
std::vector<int> numIntegrableObjectsPerMol = nIOPerMol; |
898 |
|
#endif |
922 |
|
} |
923 |
|
|
924 |
|
void SimCreator::loadCoordinates(SimInfo* info, const std::string& mdFileName) { |
923 |
– |
Globals* simParams; |
925 |
|
|
925 |
– |
simParams = info->getSimParams(); |
926 |
– |
|
926 |
|
DumpReader reader(info, mdFileName); |
927 |
|
int nframes = reader.getNFrames(); |
928 |
|
|