|   6 |     model = nlmodel;
|   7 |
|   8 |     // set the dimension
|   9 | <
|   9 | >
|  10 | #ifndef IS_MPI
|  11 |     dim = model->getDim();
|  12 | #else
|  13 | <
|  13 | >   dim = model->getDim();
|  14 | #endif
|  15 |     prevGrad.resize(dim);
|  16 |     gradient.resize(dim);
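
This first hunk makes the MPI build compute the problem dimension the same way the serial build does, so that prevGrad and gradient are sized correctly on both sides of the IS_MPI conditional. Below is a minimal sketch of the resulting setup; NLModel and IS_MPI come from the diff, while the class name, the method signature, and the stand-in getDim() are assumptions for illustration only.

    #include <vector>

    // Stand-in for the real model class; only getDim() matters here.
    struct NLModel {
        int getDim() const { return 3; }
    };

    class MinimizerSketch {
    public:
        void setModel(NLModel* nlmodel) {
            model = nlmodel;
            // set the dimension
    #ifndef IS_MPI
            dim = model->getDim();
    #else
            dim = model->getDim();  // added by the hunk: MPI builds now get a valid dim
    #endif
            prevGrad.resize(dim);   // both gradient buffers need dim entries
            gradient.resize(dim);
        }
    private:
        NLModel* model = nullptr;
        int dim = 0;
        std::vector<double> prevGrad, gradient;
    };

Without the added line, dim would be uninitialized in MPI builds and the two resize() calls below the conditional would size the gradient buffers arbitrarily.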
|  68 |     for(currentIter = 1; currentIter <= maxIteration; currentIter++){
|  69 |
|  70 |       // perform line search to minimize f(x + lambda * direction) where lambda > 0
|  71 | <     lsMinimizer->minimize(direction, 0.0, 1.0);
|  71 | >     lsMinimizer->minimize(direction, 0.0, 0.01);
|  72 |
|  73 |       lsStatus = lsMinimizer->getMinimizationStatus();
|  74 |
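
The change at line 71 only shrinks the initial trial step of the line search from 1.0 to 0.01; the search still starts at 0.0 and moves along direction. The actual lsMinimizer interface is not shown in this diff, so the backtracking routine below is a hypothetical illustration of what a (direction, start, initial step) line search does; f, x, and every other name in it are invented for the example.

    #include <vector>
    #include <cstddef>

    // Hypothetical backtracking line search: try lambda = start + initStep,
    // then halve lambda until f decreases. Illustrates the role of the third
    // argument that the hunk changes from 1.0 to 0.01.
    double lineSearch(double (*f)(const std::vector<double>&),
                      const std::vector<double>& x,
                      const std::vector<double>& direction,
                      double start, double initStep) {
        double lambda = start + initStep;  // first trial step, e.g. 0.01
        const double f0 = f(x);
        for (int attempt = 0; attempt < 50; ++attempt) {
            std::vector<double> trial(x);
            for (std::size_t k = 0; k < x.size(); ++k)
                trial[k] += lambda * direction[k];
            if (f(trial) < f0)             // accept any decrease (crude test)
                return lambda;
            lambda *= 0.5;                 // backtrack: shrink the step
        }
        return 0.0;                        // no decrease found along direction
    }

With an initial step of 1.0 the first trial point can land far outside the region where direction is a good descent direction; 0.01 presumably keeps the first move conservative, at the cost of extra function evaluations when a longer step would have been safe.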
| 122 |
| 123 |     // if maxIteration is not a multiple of writeFrq, we need to write the final result;
| 124 |     // otherwise it was already written inside the loop, so just skip it
| 125 | <   if(currentIter != (nextWriteIter - writeFrq))
| 125 | >   if(currentIter - 1 != (nextWriteIter - writeFrq))
| 126 |       writeOut(minX, currentIter);
| 127 |
| 128 |     minStatus = MINSTATUS_MAXITER;
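
The change at line 125 fixes an off-by-one: when the for loop exits normally, currentIter has already been incremented to maxIteration + 1, so the last iteration that actually ran is currentIter - 1. Comparing currentIter itself against the last written iteration (nextWriteIter - writeFrq) was always true, so the final frame was written twice whenever maxIteration was a multiple of writeFrq. Here is a self-contained sketch of the counter logic; the placement of the periodic write inside the loop is an assumption based on the comments above.

    #include <cstdio>

    int main() {
        const int writeFrq = 10, maxIteration = 20;  // maxIteration divisible by writeFrq
        int nextWriteIter = writeFrq;
        int currentIter;
        for (currentIter = 1; currentIter <= maxIteration; currentIter++) {
            if (currentIter == nextWriteIter) {      // periodic write inside the loop
                std::printf("write at iteration %d\n", currentIter);
                nextWriteIter += writeFrq;           // last write stays at nextWriteIter - writeFrq
            }
        }
        // Here currentIter == 21 and the last write happened at iteration 20.
        // Old test:   currentIter != 20      -> true  -> duplicate final write.
        // Fixed test: currentIter - 1 != 20  -> false -> correctly skipped.
        if (currentIter - 1 != (nextWriteIter - writeFrq))
            std::printf("final write at iteration %d\n", currentIter - 1);
        return 0;
    }

With maxIteration = 25 instead, the last in-loop write happens at iteration 20, currentIter - 1 is 25, and the fixed test correctly triggers the final write exactly once.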