   1   #include <cstring>
   2   #include <iostream>
   3   #include <fstream>
   4 +
   5 + #ifdef IS_MPI
   6   #include <mpi.h>
   7 + #include "mpiSimulation.hpp"
   8 + #define TAKE_THIS_TAG 0
   9 + #endif //is_mpi
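
The new TAKE_THIS_TAG constant replaces the MPI_ANY_TAG arguments that the old code passed to the MPI_Send and MPI_Recv calls below. MPI_ANY_TAG is a receive-side wildcard only; the MPI standard does not permit it as a send tag, so every point-to-point call in this file now uses one concrete tag. A minimal standalone sketch of the corrected pairing (illustration only, not DumpWriter code):

    #include <mpi.h>
    #define TAKE_THIS_TAG 0

    int main(int argc, char** argv) {
      MPI_Init(&argc, &argv);
      int rank, value = 42;
      MPI_Comm_rank(MPI_COMM_WORLD, &rank);
      if (rank == 1) {
        // a send must name a concrete, non-negative tag
        MPI_Send(&value, 1, MPI_INT, 0, TAKE_THIS_TAG, MPI_COMM_WORLD);
      } else if (rank == 0) {
        // the wildcard is legal only on the receiving side
        MPI_Recv(&value, 1, MPI_INT, 1, MPI_ANY_TAG,
                 MPI_COMM_WORLD, MPI_STATUS_IGNORE);
      }
      MPI_Finalize();
      return 0;
    }
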
  10
  11   #include "ReadWrite.hpp"
  12   #include "simError.h"
  13
  14
  15 +
  16 +
  17 +
  18   DumpWriter::DumpWriter( SimInfo* the_entry_plug ){
  19
  20   #ifdef IS_MPI
  42
  43   #ifdef IS_MPI
  44     }
  45 +
  46 +   sprintf( checkPointMsg,
  47 +            "Successfully opened output file for dumping.\n");
  48 +   MPIcheckPoint();
  49   #endif // is_mpi
  50   }
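
checkPointMsg and MPIcheckPoint() come from simError.h and act as a collective sanity barrier: every rank reaches the checkpoint together, or the run dies. The real implementation lives in simError; a hypothetical sketch of the general pattern, using an allreduce over a per-rank error flag, might look like this (exampleCheckPoint and its flag convention are illustrative assumptions, not the simError API):

    #include <mpi.h>
    #include <cstdio>

    // Hypothetical stand-in for an MPIcheckPoint-style barrier:
    // every rank contributes a local error flag; if any rank set it,
    // all ranks learn of the failure and abort together.
    void exampleCheckPoint(int localError, const char* msg) {
      int globalError = 0;
      MPI_Allreduce(&localError, &globalError, 1, MPI_INT, MPI_MAX,
                    MPI_COMM_WORLD);
      if (globalError) {
        std::fprintf(stderr, "checkpoint failed: %s\n", msg);
        MPI_Abort(MPI_COMM_WORLD, globalError);
      }
    }
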
  51

  65   void DumpWriter::writeDump( double currentTime ){
  66
  67     const int BUFFERSIZE = 2000;
  68 <   char tempBuffer[500];
  68 >   char tempBuffer[BUFFERSIZE];
  69     char writeLine[BUFFERSIZE];
  70
  71     int i;
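
Growing tempBuffer from 500 to BUFFERSIZE matters because each atom line is built with sprintf into tempBuffer and then strcat'ed into writeLine; with both buffers the same size, one formatted line can no longer overrun the smaller staging buffer. A still safer variant (an illustrative alternative, not what this file does) bounds every write explicitly with snprintf:

    #include <cstdio>

    const int BUFFERSIZE = 2000;

    void formatAtomLine(const char* name, double x, double y, double z) {
      char tempBuffer[BUFFERSIZE];
      char writeLine[BUFFERSIZE];
      // snprintf truncates instead of writing past the buffer end, so an
      // oversized atom line cannot overrun the staging buffer.
      std::snprintf(tempBuffer, sizeof(tempBuffer),
                    "%s\t%lf\t%lf\t%lf\n", name, x, y, z);
      std::snprintf(writeLine, sizeof(writeLine), "%s", tempBuffer);
    }
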
 135     // write out header and node 0's coordinates
 136
 137     if( worldRank == 0 ){
 138 <     outFile << entry_plug->mpiSim->getTotAtoms() << "\n";
 138 >     outFile << mpiSim->getTotAtoms() << "\n";
 139
 140       outFile << currentTime << "\t"
 141               << entry_plug->box_x << "\t"
 175       else
 176         strcat( writeLine, "0.0\t0.0\t0.0\t0.0\t0.0\t0.0\t0.0\n" );
 177
 178 <     outfile << writeLine;
 178 >     outFile << writeLine;
 179       masterIndex++;
 180     }
 181     outFile.flush();
 182   }
 183 +
 184 +   sprintf( checkPointMsg,
 185 +            "Successfully wrote node 0's dump configuration.\n");
 186 +   MPIcheckPoint();
 187
 188 <   for (procIndex = 1; procIndex < entry_plug->mpiSim->getNumberProcessors();
 188 >   for (procIndex = 1; procIndex < mpiSim->getNumberProcessors();
 189        procIndex++){
 190
 191     if( worldRank == 0 ){
 192 <
 192 >
 193       mpiErr = MPI_Recv(&nodeAtomsStart,1,MPI_INT,procIndex,
 194 <                       MPI_ANY_TAG,MPI_COMM_WORLD,istatus);
 195 <
 194 >                       TAKE_THIS_TAG,MPI_COMM_WORLD,istatus);
 195 >
 196       mpiErr = MPI_Recv(&nodeAtomsEnd,1,MPI_INT,procIndex,
 197 <                       MPI_ANY_TAG,MPI_COMM_WORLD, istatus);
 198 <
 197 >                       TAKE_THIS_TAG,MPI_COMM_WORLD, istatus);
 198 >
 199       // Make sure where node 0 is writing to matches where the
 200       // receiving node expects it to be.
 201 <
 201 >
 202       if (masterIndex != nodeAtomsStart){
 203         sendError = 1;
 204 <       mpiErr = MPI_Send(&sendError,1,MPI_INT,procIndex,MPI_ANY_TAG,
 204 >       mpiErr = MPI_Send(&sendError,1,MPI_INT,procIndex,TAKE_THIS_TAG,
 205                           MPI_COMM_WORLD);
 206         sprintf(painCave.errMsg,
 207                 "DumpWriter error: atoms start index (%d) for "
 210         painCave.isFatal = 1;
 211         simError();
 212       }
 213 <
 213 >
 214       sendError = 0;
 215 <     mpiErr = MPI_Send(&sendError,1,MPI_INT,procIndex,MPI_ANY_TAG,
 215 >     mpiErr = MPI_Send(&sendError,1,MPI_INT,procIndex,TAKE_THIS_TAG,
 216                         MPI_COMM_WORLD);
 217 <
 217 >
 218       // receive the nodes' writeLines
 219 <
 220 <     for ( i = nodeAtomStart; i <= nodeAtomEnd, i++){
 221 <
 222 <       mpiErr = MPI_Recv(&read_buffer,BUFFERSIZE,MPI_CHAR,procIndex,
 223 <                         MPI_ANY_TAG,MPI_COMM_WORLD,istatus );
 224 <
 219 >
 220 >     for ( i = nodeAtomsStart; i <= nodeAtomsEnd; i++){
 221 >
 222 >       mpiErr = MPI_Recv(writeLine,BUFFERSIZE,MPI_CHAR,procIndex,
 223 >                         TAKE_THIS_TAG,MPI_COMM_WORLD,istatus );
 224 >
 225         outFile << writeLine;
 226         masterIndex++;
 227       }
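
Three fixes land in this hunk: the loop now iterates over the nodeAtomsStart/nodeAtomsEnd names that are actually declared, the comma in the old loop header becomes the semicolon C++ requires, and MPI_Recv is handed the writeLine array itself rather than &read_buffer, the address of a pointer instead of the character storage. A sketch of the corrected receive, with an added MPI_Get_count probe to show how many characters actually arrived (the probe is illustrative, not in the diff):

    #include <mpi.h>

    void receiveLine(int procIndex, int tag) {
      char writeLine[2000];
      MPI_Status status;
      // an array name decays to a pointer to its first element,
      // which is exactly what MPI_Recv expects for a char buffer
      MPI_Recv(writeLine, sizeof(writeLine), MPI_CHAR, procIndex,
               tag, MPI_COMM_WORLD, &status);
      int received = 0;
      MPI_Get_count(&status, MPI_CHAR, &received);  // chars delivered
    }
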
 229
 230     else if( worldRank == procIndex ){
 231
 232 <     nodeAtomStart = entry_plug->mpiSim->getMyAtomStart();
 233 <     nodeAtomEnd = entry_plug->mpiSim->getMyAtomEnd();
 232 >     nodeAtomsStart = mpiSim->getMyAtomStart();
 233 >     nodeAtomsEnd = mpiSim->getMyAtomEnd();
 234
 235 <     mpiErr = MPI_Send(&nodeAtomsStart,1,MPI_INT,0,MPI_ANY_TAG,
 235 >     fprintf( stderr,
 236 >              "node %d: myatomStart-> %d; myatomEnd-> %d\n",
 237 >              worldRank, nodeAtomsStart, nodeAtomsEnd );
 238 >
 239 >     mpiErr = MPI_Send(&nodeAtomsStart,1,MPI_INT,0,TAKE_THIS_TAG,
 240                         MPI_COMM_WORLD);
 241 <     mpiErr = MPI_Send(&nodeAtomsEnd,1,MPI_INT,0,MPI_ANY_TAG,
 241 >     mpiErr = MPI_Send(&nodeAtomsEnd,1,MPI_INT,0,TAKE_THIS_TAG,
 242                         MPI_COMM_WORLD);
 243
 244 <     mpiErr = MPI_Recv(&sendError,1,MPI_INT,0,MPI_ANY_TAG,
 244 >     fprintf( stderr, "node %d: sent off the start and end\n", worldRank );
 245 >
 246 >     sendError = -1;
 247 >     mpiErr = MPI_Recv(&sendError,1,MPI_INT,0,TAKE_THIS_TAG,
 248                         MPI_COMM_WORLD, istatus);
 249 <     if (sendError) mpiCheckpoint();
 249 >
 250 >     fprintf( stderr, "node %d: value of sendError is %d\n", worldRank, sendError );
 251 >
 252 >     if (sendError) MPIcheckPoint();
 253
 254       // send current node's configuration line by line.
 255
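
The worker side of the handshake mirrors node 0's receives: announce the local atom range, then block on an acknowledgement before streaming any coordinate lines; a nonzero reply means the master's running index disagreed with the announced start. Initializing sendError to -1 before the receive also makes a missing reply visible in the new debug output. A self-contained condensation of the worker's half (the function wrapper is illustrative; names follow the diff):

    #include <mpi.h>
    #define TAKE_THIS_TAG 0

    // Worker side of the index handshake, condensed for illustration:
    // announce [start, end], then wait for rank 0's verdict before
    // streaming any coordinate lines.
    int handshake(int nodeAtomsStart, int nodeAtomsEnd) {
      MPI_Send(&nodeAtomsStart, 1, MPI_INT, 0, TAKE_THIS_TAG, MPI_COMM_WORLD);
      MPI_Send(&nodeAtomsEnd,   1, MPI_INT, 0, TAKE_THIS_TAG, MPI_COMM_WORLD);

      int sendError = -1;  // sentinel: stays -1 if no reply ever lands
      MPI_Recv(&sendError, 1, MPI_INT, 0, TAKE_THIS_TAG,
               MPI_COMM_WORLD, MPI_STATUS_IGNORE);
      return sendError;    // nonzero: master's index disagreed
    }
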
 285         else
 286           strcat( writeLine, "0.0\t0.0\t0.0\t0.0\t0.0\t0.0\t0.0\n" );
 287
 288 <       mpiErr = MPI_Send(writeLine,BUFFERSIZE,MPI_CHAR,0,MPI_ANY_TAG,
 288 >       fprintf( stderr,
 289 >                "node %d: I'm sending the line:\n->%s\n", worldRank, writeLine );
 290 >
 291 >       mpiErr = MPI_Send(writeLine,BUFFERSIZE,MPI_CHAR,0,TAKE_THIS_TAG,
 292                           MPI_COMM_WORLD);
 293       }
 294     }
 295
 296     sprintf(checkPointMsg,"Node %d sent dump configuration.",
 297             procIndex);
 298 <   mpiCheckPoint();
 298 >   MPIcheckPoint();
 299   }
 300
 301   #endif // is_mpi
 340   #ifdef IS_MPI
 341     }
 342
 343 <   sprintf(checkPointMsg,"Opened file for final configuration\n",procIndex);
 344 <   mpiCheckPoint();
 343 >   sprintf(checkPointMsg,"Opened file for final configuration\n");
 344 >   MPIcheckPoint();
 345
 346   #endif //is_mpi
 347
 351
 352     finalOut << nAtoms << "\n";
 353
 354 <   finalOut << currentTime << "\t"
 355 <           << entry_plug->box_x << "\t"
 356 <           << entry_plug->box_y << "\t"
 357 <           << entry_plug->box_z << "\n";
 354 >   finalOut << entry_plug->box_x << "\t"
 355 >            << entry_plug->box_y << "\t"
 356 >            << entry_plug->box_z << "\n";
 357
 358     for( i=0; i<nAtoms; i++ ){
 359
 406     // write out header and node 0's coordinates
 407
 408     if( worldRank == 0 ){
 409 <     finalOut << entry_plug->mpiSim->getTotAtoms() << "\n";
 409 >     finalOut << mpiSim->getTotAtoms() << "\n";
 410
 411 <     finalOut << currentTime << "\t"
 412 <             << entry_plug->box_x << "\t"
 413 <             << entry_plug->box_y << "\t"
 414 <             << entry_plug->box_z << "\n";
 415 <
 411 >     finalOut << entry_plug->box_x << "\t"
 412 >              << entry_plug->box_y << "\t"
 413 >              << entry_plug->box_z << "\n";
 414 >
 415     masterIndex = 0;
 416 +
 417 +     std::cerr << "about to write node 0 atoms. nAtoms = " << nAtoms << "\n";
 418 +
 419     for( i=0; i<nAtoms; i++ ){
 420
 421       sprintf( tempBuffer,
 448       else
 449         strcat( writeLine, "0.0\t0.0\t0.0\t0.0\t0.0\t0.0\t0.0\n" );
 450
 451 <     outfile << writeLine;
 451 >     finalOut << writeLine;
 452       masterIndex++;
 453     }
 454     finalOut.flush();
 455   }
 456
 457 <   for (procIndex = 1; procIndex < entry_plug->mpiSim->getNumberProcessors();
 457 >   for (procIndex = 1; procIndex < mpiSim->getNumberProcessors();
 458        procIndex++){
 459
 460     if( worldRank == 0 ){
 461
 462       mpiErr = MPI_Recv(&nodeAtomsStart,1,MPI_INT,procIndex,
 463 <                       MPI_ANY_TAG,MPI_COMM_WORLD,istatus);
 463 >                       TAKE_THIS_TAG,MPI_COMM_WORLD,istatus);
 464
 465       mpiErr = MPI_Recv(&nodeAtomsEnd,1,MPI_INT,procIndex,
 466 <                       MPI_ANY_TAG,MPI_COMM_WORLD, istatus);
 466 >                       TAKE_THIS_TAG,MPI_COMM_WORLD, istatus);
 467
 468       // Make sure where node 0 is writing to matches where the
 469       // receiving node expects it to be.
 470
 471       if (masterIndex != nodeAtomsStart){
 472         sendError = 1;
 473 <       mpiErr = MPI_Send(&sendError,1,MPI_INT,procIndex,MPI_ANY_TAG,
 473 >       mpiErr = MPI_Send(&sendError,1,MPI_INT,procIndex,TAKE_THIS_TAG,
 474                           MPI_COMM_WORLD);
 475         sprintf(painCave.errMsg,
 476                 "DumpWriter error: atoms start index (%d) for "
 481       }
 482
 483       sendError = 0;
 484 <     mpiErr = MPI_Send(&sendError,1,MPI_INT,procIndex,MPI_ANY_TAG,
 484 >     mpiErr = MPI_Send(&sendError,1,MPI_INT,procIndex,TAKE_THIS_TAG,
 485                         MPI_COMM_WORLD);
 486
 487       // receive the nodes' writeLines
 488
 489 <     for ( i = nodeAtomStart; i <= nodeAtomEnd, i++){
 489 >     for ( i = nodeAtomsStart; i <= nodeAtomsEnd; i++){
 490
 491 <       mpiErr = MPI_Recv(&read_buffer,BUFFERSIZE,MPI_CHAR,procIndex,
 492 <                         MPI_ANY_TAG,MPI_COMM_WORLD,istatus );
 491 >       mpiErr = MPI_Recv(writeLine,BUFFERSIZE,MPI_CHAR,procIndex,
 492 >                         TAKE_THIS_TAG,MPI_COMM_WORLD,istatus );
 493
 494         finalOut << writeLine;
 495         masterIndex++;
 500
 501     else if( worldRank == procIndex ){
 502
 503 <     nodeAtomStart = entry_plug->mpiSim->getMyAtomStart();
 504 <     nodeAtomEnd = entry_plug->mpiSim->getMyAtomEnd();
 503 >     nodeAtomsStart = mpiSim->getMyAtomStart();
 504 >     nodeAtomsEnd = mpiSim->getMyAtomEnd();
 505
 506 <     mpiErr = MPI_Send(&nodeAtomsStart,1,MPI_INT,0,MPI_ANY_TAG,
 506 >     mpiErr = MPI_Send(&nodeAtomsStart,1,MPI_INT,0,TAKE_THIS_TAG,
 507                         MPI_COMM_WORLD);
 508 <     mpiErr = MPI_Send(&nodeAtomsEnd,1,MPI_INT,0,MPI_ANY_TAG,
 508 >     mpiErr = MPI_Send(&nodeAtomsEnd,1,MPI_INT,0,TAKE_THIS_TAG,
 509                         MPI_COMM_WORLD);
 510
 511 <     mpiErr = MPI_Recv(&sendError,1,MPI_INT,0,MPI_ANY_TAG,
 511 >     mpiErr = MPI_Recv(&sendError,1,MPI_INT,0,TAKE_THIS_TAG,
 512                         MPI_COMM_WORLD, istatus);
 513 <     if (sendError) mpiCheckpoint();
 513 >     if (sendError) MPIcheckPoint();
 514
 515       // send current node's configuration line by line.
 516
 546         else
 547           strcat( writeLine, "0.0\t0.0\t0.0\t0.0\t0.0\t0.0\t0.0\n" );
 548
 549 <       mpiErr = MPI_Send(writeLine,BUFFERSIZE,MPI_CHAR,0,MPI_ANY_TAG,
 549 >       mpiErr = MPI_Send(writeLine,BUFFERSIZE,MPI_CHAR,0,TAKE_THIS_TAG,
 550                           MPI_COMM_WORLD);
 551       }
 552     }
 553
 554     sprintf(checkPointMsg,"Node %d sent dump configuration.",
 555             procIndex);
 556 <   mpiCheckPoint();
 556 >   MPIcheckPoint();
 557   }
 558
 559   if( worldRank == 0 ) finalOut.close();