 50        if (simParams->haveHydroPropFile()) {
 51          hydroPropMap = parseFrictionFile(simParams->getHydroPropFile());
 52        } else {
 53 <         //error
 53 >         sprintf( painCave.errMsg,
 54 >                  "HydroPropFile keyword must be set if Langevin Dynamics is used\n");
 55 >         painCave.severity = OOPSE_ERROR;
 56 >         painCave.isFatal = 1;
 57 >         simError();
 58        }
 59
 60        SimInfo::MoleculeIterator i;
 68        if (iter != hydroPropMap.end()) {
 69          hydroProps_.push_back(iter->second);
 70        } else {
 71 <         //error
 71 >         sprintf( painCave.errMsg,
 72 >                  "Cannot find resistance tensor for atom [%s]\n", integrableObject->getType().c_str());
 73 >         painCave.severity = OOPSE_ERROR;
 74 >         painCave.isFatal = 1;
 75 >         simError();
 76        }
 77
 78      }
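For readers outside the OOPSE tree, here is a minimal standalone sketch of the lookup pattern the hunk above relies on: each integrable object's type string keys into hydroPropMap, and a missing entry is treated as a fatal input error. The container typedef, the reduced HydroProp struct, and the plain string list standing in for the SimInfo molecule traversal are assumptions for illustration, not OOPSE's actual declarations.

  // Sketch only: simplified stand-ins for the OOPSE types used in the diff.
  #include <cstddef>
  #include <map>
  #include <string>
  #include <vector>

  struct HydroProp {                 // reduced; the real struct also carries tensors
    double cor[3];
  };
  typedef std::map<std::string, HydroProp> HydroPropMap;   // assumed container type

  // Collect one HydroProp per object type, mirroring the push_back in the hunk above.
  void collectHydroProps(const HydroPropMap& hydroPropMap,
                         const std::vector<std::string>& objectTypes,   // hypothetical stand-in
                         std::vector<HydroProp>& hydroProps_) {
    for (std::size_t n = 0; n < objectTypes.size(); ++n) {
      HydroPropMap::const_iterator iter = hydroPropMap.find(objectTypes[n]);
      if (iter != hydroPropMap.end()) {
        hydroProps_.push_back(iter->second);
      } else {
        // OOPSE fills painCave.errMsg and calls simError() here (fatal).
      }
    }
  }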
 88
 89        const unsigned int BufferSize = 65535;
 90        char buffer[BufferSize];
 83 <      Mat3x3d Ddtt;
 84 <      Mat3x3d Ddtr;
 85 <      Mat3x3d Ddrr;
 91        while (ifs.getline(buffer, BufferSize)) {
 92          StringTokenizer tokenizer(buffer);
 93          HydroProp currProp;
 96          currProp.cor[0] = tokenizer.nextTokenAsDouble();
 97          currProp.cor[1] = tokenizer.nextTokenAsDouble();
 98          currProp.cor[2] = tokenizer.nextTokenAsDouble();
 95 <
 96 <        Ddtt(0,0) = tokenizer.nextTokenAsDouble();
 97 <        Ddtt(0,1) = tokenizer.nextTokenAsDouble();
 98 <        Ddtt(0,2) = tokenizer.nextTokenAsDouble();
 99 <        Ddtt(1,0) = tokenizer.nextTokenAsDouble();
100 <        Ddtt(1,1) = tokenizer.nextTokenAsDouble();
101 <        Ddtt(1,2) = tokenizer.nextTokenAsDouble();
102 <        Ddtt(2,0) = tokenizer.nextTokenAsDouble();
103 <        Ddtt(2,1) = tokenizer.nextTokenAsDouble();
104 <        Ddtt(2,2) = tokenizer.nextTokenAsDouble();
105 <
106 <        Ddtr(0,0) = tokenizer.nextTokenAsDouble();
107 <        Ddtr(0,1) = tokenizer.nextTokenAsDouble();
108 <        Ddtr(0,2) = tokenizer.nextTokenAsDouble();
109 <        Ddtr(1,0) = tokenizer.nextTokenAsDouble();
110 <        Ddtr(1,1) = tokenizer.nextTokenAsDouble();
111 <        Ddtr(1,2) = tokenizer.nextTokenAsDouble();
112 <        Ddtr(2,0) = tokenizer.nextTokenAsDouble();
113 <        Ddtr(2,1) = tokenizer.nextTokenAsDouble();
114 <        Ddtr(2,2) = tokenizer.nextTokenAsDouble();
115 <
116 <        Ddrr(0,0) = tokenizer.nextTokenAsDouble();
117 <        Ddrr(0,1) = tokenizer.nextTokenAsDouble();
118 <        Ddrr(0,2) = tokenizer.nextTokenAsDouble();
119 <        Ddrr(1,0) = tokenizer.nextTokenAsDouble();
120 <        Ddrr(1,1) = tokenizer.nextTokenAsDouble();
121 <        Ddrr(1,2) = tokenizer.nextTokenAsDouble();
122 <        Ddrr(2,0) = tokenizer.nextTokenAsDouble();
123 <        Ddrr(2,1) = tokenizer.nextTokenAsDouble();
124 <        Ddrr(2,2) = tokenizer.nextTokenAsDouble();
125 <
 99 >
100         currProp.Xirtt(0,0) = tokenizer.nextTokenAsDouble();
101         currProp.Xirtt(0,1) = tokenizer.nextTokenAsDouble();
102         currProp.Xirtt(0,2) = tokenizer.nextTokenAsDouble();
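The hunk is truncated after the first row of Xirtt, so the full column layout of the friction file is not visible here. The standalone sketch below only illustrates the whitespace-token reading pattern that parseFrictionFile follows, with std::istringstream standing in for OOPSE's StringTokenizer; the leading atom-type label, the struct name, and the row-major tensor ordering are hypothetical details added for the example.

  // Illustrative sketch, not OOPSE's parseFrictionFile: reads one
  // whitespace-delimited record per line into a simplified HydroProp.
  #include <fstream>
  #include <map>
  #include <sstream>
  #include <string>

  struct SimpleHydroProp {            // hypothetical, reduced stand-in for HydroProp
    double cor[3];                    // centre of resistance, as in the diff
    double Xitt[3][3];                // first 3x3 resistance block; row-major order assumed
  };

  std::map<std::string, SimpleHydroProp> parseFrictionFileSketch(const std::string& fname) {
    std::map<std::string, SimpleHydroProp> props;
    std::ifstream ifs(fname.c_str());
    std::string line;
    while (std::getline(ifs, line)) {
      std::istringstream tokens(line);
      std::string type;               // assumed leading atom-type label used as the map key
      SimpleHydroProp p;
      tokens >> type >> p.cor[0] >> p.cor[1] >> p.cor[2];
      for (int i = 0; i < 3; ++i)
        for (int j = 0; j < 3; ++j)
          tokens >> p.Xitt[i][j];     // further tensor blocks would follow the same pattern
      if (!tokens.fail())             // skip malformed or incomplete lines
        props[type] = p;
    }
    return props;
  }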