#include <algorithm>
#include <iostream>
#include <vector>

#ifdef IS_MPI
#include <mpi.h>
#endif

#include "io/ZConsWriter.hpp"
#include "utils/simError.h"

namespace oopse {
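
// ZConsWriter dumps, at each sampling time, the instantaneous z-constraint
// force and position of every fixed z-constrained molecule.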
ZConsWriter::ZConsWriter(SimInfo* info, const std::string& filename) : info_(info) {
  // Master-slave mode: only the master node writes to disk.
#ifdef IS_MPI
  if (worldRank == 0) {
#endif

    output_.open(filename.c_str());

    if (!output_) {
      sprintf(painCave.errMsg,
              "Could not open %s for z-constraint output.\n", filename.c_str());
      painCave.isFatal = 1;
      simError();
    }

    output_ << "#time(fs)" << std::endl;
    output_ << "#number of fixed z-constraint molecules" << std::endl;
    output_ << "#global index of molecule\tzconstraint force\tcurrent z position\ttarget z position"
            << std::endl;

#ifdef IS_MPI
  }
#endif
}

ZConsWriter::~ZConsWriter() {
#ifdef IS_MPI
  if (worldRank == 0) {
#endif
    output_.close();
#ifdef IS_MPI
  }
#endif
}
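
// writeFZ writes one frame per call: the time stamp, the number of fixed
// molecules, then one record per molecule. In the MPI build the slave nodes
// send their records to the master node, which merges them with its own
// before writing.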
void ZConsWriter::writeFZ(const std::list<ZconstraintMol>& fixedZmols) {
#ifndef IS_MPI
  output_ << info_->getSnapshotManager()->getCurrentSnapshot()->getTime() << std::endl;
  output_ << fixedZmols.size() << std::endl;

  std::list<ZconstraintMol>::const_iterator i;
  for (i = fixedZmols.begin(); i != fixedZmols.end(); ++i) {
    output_ << i->mol->getGlobalIndex() << "\t" << i->fz << "\t"
            << i->zpos << "\t" << i->param.zTargetPos << std::endl;
  }
#else
  int nproc;
  MPI_Comm_size(MPI_COMM_WORLD, &nproc);
  const int masterNode = 0;
  const int tag = 0;  // message tag shared by the matching sends and receives below
  int myNode = worldRank;
  std::vector<int> tmpNFixedZmols(nproc, 0);
  std::vector<int> nFixedZmolsInProc(nproc, 0);
  tmpNFixedZmols[myNode] = fixedZmols.size();

  // Use MPI_Allreduce so that every node learns how many fixed z-constraint
  // molecules each process owns.
  MPI_Allreduce(&tmpNFixedZmols[0], &nFixedZmolsInProc[0], nproc, MPI_INT,
                MPI_SUM, MPI_COMM_WORLD);

  MPI_Status status;
  int zmolIndex;
  double data[3];

  if (myNode == masterNode) {
    std::vector<ZconsData> zconsData;
    ZconsData tmpData;
    for (int i = 0; i < nproc; ++i) {
      if (i == masterNode) {
        // The master node appends its own molecules directly.
        std::list<ZconstraintMol>::const_iterator j;
        for (j = fixedZmols.begin(); j != fixedZmols.end(); ++j) {
          tmpData.zmolIndex = j->mol->getGlobalIndex();
          tmpData.zforce = j->fz;
          tmpData.zpos = j->zpos;
          tmpData.zconsPos = j->param.zTargetPos;
          zconsData.push_back(tmpData);
        }
      } else {
        // Receive one index/data pair per fixed molecule owned by node i.
        for (int k = 0; k < nFixedZmolsInProc[i]; ++k) {
          MPI_Recv(&zmolIndex, 1, MPI_INT, i, tag, MPI_COMM_WORLD, &status);
          MPI_Recv(data, 3, MPI_DOUBLE, i, tag, MPI_COMM_WORLD, &status);
          tmpData.zmolIndex = zmolIndex;
          tmpData.zforce = data[0];
          tmpData.zpos = data[1];
          tmpData.zconsPos = data[2];
          zconsData.push_back(tmpData);
        }
      }
    }

    output_ << info_->getSnapshotManager()->getCurrentSnapshot()->getTime() << std::endl;
    output_ << zconsData.size() << std::endl;

    std::vector<ZconsData>::iterator l;
    for (l = zconsData.begin(); l != zconsData.end(); ++l) {
      output_ << l->zmolIndex << "\t" << l->zforce << "\t"
              << l->zpos << "\t" << l->zconsPos << std::endl;
    }

  } else {
    // Slave nodes ship their fixed molecules to the master node.
    std::list<ZconstraintMol>::const_iterator j;
    for (j = fixedZmols.begin(); j != fixedZmols.end(); ++j) {
      zmolIndex = j->mol->getGlobalIndex();
      data[0] = j->fz;
      data[1] = j->zpos;
      data[2] = j->param.zTargetPos;
      MPI_Send(&zmolIndex, 1, MPI_INT, masterNode, tag, MPI_COMM_WORLD);
      MPI_Send(data, 3, MPI_DOUBLE, masterNode, tag, MPI_COMM_WORLD);
    }
  }
#endif
}

} // namespace oopse