 51 |  #include "io/ConstraintWriter.hpp"
 52 |  #include "utils/simError.h"
 53 |
 54 +  // debugging:
 55 +  #include <unistd.h>
 56 +
 57 |  namespace OpenMD {
 58 <  ConstraintWriter::ConstraintWriter(SimInfo* info, const std::string& filename) : info_(info) {
 58 >  ConstraintWriter::ConstraintWriter(SimInfo* info,
 59 >  const std::string& filename): info_(info) {
 60 |  //use master - slave mode, only master node writes to disk
 61 |  #ifdef IS_MPI
 62 |  if(worldRank == 0){
 63 |  #endif
 64 |  output_.open(filename.c_str());
 65 <
 65 >
 66 |  if(!output_){
 67 |  sprintf( painCave.errMsg,
 68 <  "Could not open %s for Constraint output\n", filename.c_str());
 68 >  "Could not open %s for Constraint output\n",
 69 >  filename.c_str());
 70 |  painCave.isFatal = 1;
 71 |  simError();
 72 |  }
 74 |  output_ << "#time(fs)\t"
 75 |  << "Index of atom 1\t"
 76 |  << "Index of atom 2\tconstraint force" << std::endl;
 77 <
 77 >
 78 |  #ifdef IS_MPI
 79 |  }
 80 <  #endif
 76 <
 80 >  #endif
 81 |  }
 82 <
 83 <  ConstraintWriter::~ConstraintWriter() {
 80 <
 82 >
 83 >  ConstraintWriter::~ConstraintWriter() {
 84 |  #ifdef IS_MPI
 85 |  if(worldRank == 0 ){
 86 |  #endif
 89 |  }
 90 |  #endif
 91 |  }
 92 <
 92 >
 93 |  void ConstraintWriter::writeConstraintForces(const std::list<ConstraintPair*>& constraints){
 94 |  #ifndef IS_MPI
 95 |  std::list<ConstraintPair*>::const_iterator i;
103 |  }
104 |  }
105 |  #else
106 <
106 >
107 |  const int masterNode = 0;
108 |  int nproc;
109 |  int myNode;
111 |  MPI_Comm_rank( MPI_COMM_WORLD, &myNode);
112 |
113 |  std::vector<int> nConstraints(nproc, 0);
111 -
114 |  nConstraints[myNode] = constraints.size();
115 |
116 |  //do MPI_ALLREDUCE to exchange the total number of constraints:
117 <  MPI_Allreduce(MPI_IN_PLACE, &nConstraints[0], nproc, MPI_INT, MPI_SUM, MPI_COMM_WORLD);
117 >  MPI_Allreduce(MPI_IN_PLACE, &nConstraints[0], nproc, MPI_INT, MPI_SUM,
118 >  MPI_COMM_WORLD);
119 |
120 |  MPI_Status ierr;
121 |  int atom1, atom2, doPrint;
150 |  }
151 |  }
152 |  }
153 <
151 <  output_ << info_->getSnapshotManager()->getCurrentSnapshot()->getTime() << std::endl;
152 <  output_ << constraintData.size() << std::endl;
153 <
153 >
154 |  std::vector<ConstraintData>::iterator l;
155 |  for (l = constraintData.begin(); l != constraintData.end(); ++l) {
156 |  if (l->printForce) {
171 |
172 |  MPI_Send(&atom1, 1, MPI_INT, masterNode, 0, MPI_COMM_WORLD);
173 |  MPI_Send(&atom2, 1, MPI_INT, masterNode, 0, MPI_COMM_WORLD);
174 <  MPI_Send(&constraintForce, 1, MPI_REALTYPE, masterNode, 0, MPI_COMM_WORLD);
174 >  MPI_Send(&constraintForce, 1, MPI_REALTYPE, masterNode, 0,
175 >  MPI_COMM_WORLD);
176 |  MPI_Send(&printForce, 1, MPI_INT, masterNode, 0, MPI_COMM_WORLD);
177 |  }
178 |  }
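
Note on the constructor and destructor hunks: all file I/O stays on the master rank, since the stream is only opened, written, and closed inside the #ifdef IS_MPI / worldRank == 0 guard. A minimal, self-contained sketch of that pattern follows; it is an illustration only, not OpenMD code, and the file name, header text, and error handling are placeholders.

// Sketch (not OpenMD code): only rank 0 opens and writes the output file,
// mirroring the worldRank == 0 guard in ConstraintWriter's constructor.
#include <fstream>
#include <iostream>
#include <mpi.h>

int main(int argc, char** argv) {
  MPI_Init(&argc, &argv);
  int rank;
  MPI_Comm_rank(MPI_COMM_WORLD, &rank);

  std::ofstream output;
  if (rank == 0) {                              // master-only I/O
    output.open("constraints.dat");             // placeholder file name
    if (!output) {
      std::cerr << "Could not open constraints.dat for Constraint output\n";
      MPI_Abort(MPI_COMM_WORLD, 1);
    }
    output << "#time(fs)\tIndex of atom 1\tIndex of atom 2\tconstraint force\n";
  }

  // ... all ranks compute; only rank 0 ever touches `output` ...

  MPI_Finalize();
  return 0;
}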
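Note on the writeConstraintForces hunks: every rank records its own constraint count, an in-place MPI_Allreduce makes all counts visible on every rank, and non-master ranks then MPI_Send atom indices and forces to the master one constraint at a time. The matching receive loop sits in the lines elided from the hunk above (roughly lines 122 through 150), so the sketch below only reconstructs the overall shape under stated assumptions: a hypothetical Record struct stands in for OpenMD's ConstraintData, MPI_DOUBLE stands in for MPI_REALTYPE, and the printForce flag is omitted.

// Sketch (assumptions noted above): exchange per-rank counts, then gather
// records on the master rank via point-to-point messages and write them.
#include <fstream>
#include <vector>
#include <mpi.h>

struct Record { int atom1; int atom2; double force; };   // hypothetical payload

void gatherAndWrite(const std::vector<Record>& local, std::ofstream& output) {
  const int masterNode = 0;
  int nproc, myNode;
  MPI_Comm_size(MPI_COMM_WORLD, &nproc);
  MPI_Comm_rank(MPI_COMM_WORLD, &myNode);

  // Each rank fills only its own slot; the sum-reduction therefore leaves
  // every rank's count visible everywhere after the call.
  std::vector<int> counts(nproc, 0);
  counts[myNode] = static_cast<int>(local.size());
  MPI_Allreduce(MPI_IN_PLACE, &counts[0], nproc, MPI_INT, MPI_SUM,
                MPI_COMM_WORLD);

  if (myNode == masterNode) {
    // Master writes its own records directly ...
    for (const Record& r : local)
      output << r.atom1 << '\t' << r.atom2 << '\t' << r.force << '\n';
    // ... then receives exactly counts[src] records from every other rank.
    for (int src = 0; src < nproc; ++src) {
      if (src == masterNode) continue;
      for (int k = 0; k < counts[src]; ++k) {
        Record r;
        MPI_Status status;
        MPI_Recv(&r.atom1, 1, MPI_INT,    src, 0, MPI_COMM_WORLD, &status);
        MPI_Recv(&r.atom2, 1, MPI_INT,    src, 0, MPI_COMM_WORLD, &status);
        MPI_Recv(&r.force, 1, MPI_DOUBLE, src, 0, MPI_COMM_WORLD, &status);
        output << r.atom1 << '\t' << r.atom2 << '\t' << r.force << '\n';
      }
    }
  } else {
    // Workers send one record at a time, matching the MPI_Send calls in the diff.
    for (const Record& rec : local) {
      int a1 = rec.atom1, a2 = rec.atom2;
      double f = rec.force;
      MPI_Send(&a1, 1, MPI_INT,    masterNode, 0, MPI_COMM_WORLD);
      MPI_Send(&a2, 1, MPI_INT,    masterNode, 0, MPI_COMM_WORLD);
      MPI_Send(&f,  1, MPI_DOUBLE, masterNode, 0, MPI_COMM_WORLD);
    }
  }
}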