|   6 |
|   7 |     model = nlmodel;
|   8 |     //set the dimension
|   9 | <
|   9 | >
|  10 | #ifndef IS_MPI
|  11 |     dim = model->getDim();
|  12 | #else
|  13 | <
|  13 | >     dim = model->getDim();
|  14 | #endif
|  15 |     prevGrad.resize(dim);
|  16 |     gradient.resize(dim);
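
Both branches of the IS_MPI conditional now take the problem dimension from the model, so the serial and parallel builds follow the same path: dim = model->getDim() is set before the prevGrad and gradient buffers are resized to dim entries.
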
|  68 |     for(currentIter = 1;currentIter <= maxIteration; currentIter++){
|  69 |
|  70 |         // perform line search to minimize f(x + lamda * direction) where stepSize > 0
|  71 | <       lsMinimizer->minimize(direction, 0.0, 1.0);
|  71 | >       lsMinimizer->minimize(direction, 0.0, 0.01);
|  72 |
|  73 |         lsStatus = lsMinimizer->getMinimizationStatus();
|  74 | +
|  75 | +       lamda = lsMinimizer->getMinVar();
|  76 |
|  77 |         if(lsStatus ==MINSTATUS_ERROR){
|  78 | <           minStatus = MINSTATUS_ERROR;
|  78 | >           if (lamda == 0){
|  79 | >
|  80 | >               for(int i = 0; i < direction.size(); i++)
|  81 | >                   direction[i] = -prevGrad[i];
|  82 | >
|  83 | >               continue;
|  84 | >           }
|  85 | >           minStatus = MINSTATUS_ERROR;
|  86 |             return;
|  87 |         }
|  88 | <
|  89 | <       prevMinX = minX;
|  90 | <       lamda = lsMinimizer->getMinVar();
|  88 | >       else{
|  89 | >           prevMinX = minX;
|  90 | >       }
|  91 |
|  92 |         for(int i = 0; i < direction.size(); i++)
|  93 |             minX[i] = minX[i] + lamda * direction[i];
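
The substantive change in this hunk is the recovery path when the line search fails. The line search now starts from a much smaller initial step (0.01 instead of 1.0), the step length lamda is always read back from getMinVar(), and a failed search with lamda == 0 no longer aborts the run: direction is reset to the steepest-descent direction -prevGrad and the iteration is retried. Only a failure with a nonzero step is still fatal; on success prevMinX is saved before minX is advanced by lamda along direction. The following is a minimal, self-contained toy sketch of that restart pattern (a quadratic objective and a crude backtracking line search; apart from lamda, direction, prevGrad, minX, and currentIter, every name is invented for illustration and none of this is the project's minimizer):

    // Toy sketch only: restart from steepest descent when the line search
    // makes no progress, as the patched loop above does. Not project code.
    #include <cstdio>
    #include <vector>

    // f(x) = x0^2 + 10*x1^2 and its gradient.
    static double f(const std::vector<double>& x) {
        return x[0] * x[0] + 10.0 * x[1] * x[1];
    }
    static std::vector<double> grad(const std::vector<double>& x) {
        return { 2.0 * x[0], 20.0 * x[1] };
    }

    // Crude backtracking search; returning 0 plays the role of a line-search
    // error with lamda == 0 in the patch.
    static double lineSearch(const std::vector<double>& x,
                             const std::vector<double>& d) {
        double lamda = 0.01;                       // small initial step, as in the patch
        for (int k = 0; k < 30; ++k, lamda *= 0.5) {
            std::vector<double> trial = { x[0] + lamda * d[0], x[1] + lamda * d[1] };
            if (f(trial) < f(x)) return lamda;
        }
        return 0.0;
    }

    int main() {
        std::vector<double> minX      = { 1.0, 1.0 };
        std::vector<double> prevGrad  = grad(minX);
        std::vector<double> direction = { 1.0, 0.0 };   // deliberately poor start

        for (int currentIter = 1; currentIter <= 50; ++currentIter) {
            double lamda = lineSearch(minX, direction);

            if (lamda == 0.0) {
                // line search made no progress: restart from steepest descent, retry
                for (std::size_t i = 0; i < direction.size(); ++i)
                    direction[i] = -prevGrad[i];
                continue;
            }

            // step to the new point
            for (std::size_t i = 0; i < minX.size(); ++i)
                minX[i] += lamda * direction[i];

            prevGrad = grad(minX);
            for (std::size_t i = 0; i < direction.size(); ++i)
                direction[i] = -prevGrad[i];             // plain steepest descent afterwards
        }

        std::printf("f(minX) = %g after the loop\n", f(minX));
        return 0;
    }

In the sketch the deliberately poor initial direction makes the first line search fail, so the restart branch runs exactly once before ordinary steepest descent takes over.
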
|  98 |         model->setX(minX);
|  99 |         gradient = model->calcGrad();
| 100 |
| 101 | +       minX = model->getX();
| 102 |         // stop if converge
| 103 |         if (checkConvergence() > 0){
| 104 |             writeOut(minX, currentIter);
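
The new read-back at line 101, minX = model->getX(), presumably keeps the minimizer's copy of the coordinates in step with whatever the model actually stored during setX()/calcGrad() (for example if the model adjusts or redistributes them), so the convergence check and writeOut() that follow operate on the same point the gradient was evaluated at.
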
| 132 |
| 133 |     // if writeFrq is not a multipiler of maxIteration, we need to write the final result
| 134 |     // otherwise, we already write it inside the loop, just skip it
| 135 | <   if(currentIter != (nextWriteIter - writeFrq))
| 135 | >   if(currentIter - 1 != (nextWriteIter - writeFrq))
| 136 |         writeOut(minX, currentIter);
| 137 |
| 138 |     minStatus = MINSTATUS_MAXITER;
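
To see the off-by-one that the change at line 135 fixes, take a hypothetical run with maxIteration = 100 and writeFrq = 10, and assume nextWriteIter advances by writeFrq after each periodic write (which is what the nextWriteIter - writeFrq test implies). The loop writes at iteration 100 and bumps nextWriteIter to 110, then exits with currentIter = 101. The old test, currentIter != (nextWriteIter - writeFrq), compares 101 with 100 and writes the final configuration a second time; the corrected test compares currentIter - 1 = 100 with 100 and skips the duplicate, while a run with, say, maxIteration = 105 still gets its final write because 105 != 100.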