71       lsMinimizer->minimize(direction, 0.0, 0.01);
72
73       lsStatus = lsMinimizer->getMinimizationStatus();
74  +
75  +    lamda = lsMinimizer->getMinVar();
76
77       if (lsStatus == MINSTATUS_ERROR){
78  <        minStatus = MINSTATUS_ERROR;
78  >        if (lamda == 0){
79  >
80  >            for (int i = 0; i < direction.size(); i++)
81  >                direction[i] = -prevGrad[i];
82  >
83  >            continue;
84  >        }
85  >        minStatus = MINSTATUS_ERROR;
86           return;
87       }
88  <
89  <    prevMinX = minX;
90  <    lamda = lsMinimizer->getMinVar();
88  >    else{
89  >        prevMinX = minX;
90  >    }
91
92       for (int i = 0; i < direction.size(); i++)
93           minX[i] = minX[i] + lamda * direction[i];
98       model->setX(minX);
99       gradient = model->calcGrad();
100
101 +    minX = model->getX();
102      // stop if converged
103      if (checkConvergence() > 0){
104          writeOut(minX, currentIter);
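
The effect of this change: previously any line-search failure aborted the whole minimization with MINSTATUS_ERROR. Now, when the failed search also returned a zero step (lamda == 0), the search direction is reset to steepest descent (-prevGrad) and the iteration is retried; only a failure with a nonzero step remains fatal. prevMinX is likewise only updated on a successful search, and the added minX = model->getX() apparently re-fetches the coordinates in case the model adjusts them inside setX(). Below is a minimal, self-contained sketch of the restart pattern; the quadratic objective, the backtracking lineSearch(), and the Polak-Ribiere update are illustrative stand-ins, not the project's actual lsMinimizer/model API.

```cpp
#include <cmath>
#include <cstddef>
#include <cstdio>
#include <vector>

using Vec = std::vector<double>;

// Illustrative objective f(x) = 0.5*(x0^2 + 10*x1^2); its gradient is (x0, 10*x1).
static double f(const Vec& x) { return 0.5 * (x[0] * x[0] + 10.0 * x[1] * x[1]); }
static Vec calcGrad(const Vec& x) { return {x[0], 10.0 * x[1]}; }

// Backtracking line search; returns 0.0 when no decreasing step is found,
// which is the failure case the patched branch recovers from.
static double lineSearch(const Vec& x, const Vec& d) {
    for (double lamda = 1.0; lamda > 1e-9; lamda *= 0.5) {
        Vec trial = {x[0] + lamda * d[0], x[1] + lamda * d[1]};
        if (f(trial) < f(x)) return lamda;
    }
    return 0.0;
}

int main() {
    Vec minX = {3.0, -2.0};
    Vec gradient = calcGrad(minX);
    Vec prevGrad = gradient;                       // gradient at the current minX
    Vec direction = {-gradient[0], -gradient[1]};  // start with steepest descent

    for (int iter = 0; iter < 200; iter++) {
        double lamda = lineSearch(minX, direction);
        if (lamda == 0) {
            // Line search failed: restart along steepest descent at the
            // current point and retry, instead of aborting with an error.
            for (std::size_t i = 0; i < direction.size(); i++)
                direction[i] = -prevGrad[i];
            continue;
        }

        for (std::size_t i = 0; i < minX.size(); i++)
            minX[i] = minX[i] + lamda * direction[i];

        Vec gOld = gradient;
        gradient = calcGrad(minX);
        prevGrad = gradient;  // keep the restart direction tied to the new minX

        // Stop once the gradient is (numerically) zero.
        if (gradient[0] * gradient[0] + gradient[1] * gradient[1] < 1e-16) break;

        // Polak-Ribiere+ update for the next conjugate direction.
        double num = gradient[0] * (gradient[0] - gOld[0])
                   + gradient[1] * (gradient[1] - gOld[1]);
        double den = gOld[0] * gOld[0] + gOld[1] * gOld[1];
        double beta = (den > 0.0) ? std::fmax(0.0, num / den) : 0.0;
        for (std::size_t i = 0; i < direction.size(); i++)
            direction[i] = -gradient[i] + beta * direction[i];
    }
    std::printf("minimum near (%g, %g)\n", minX[0], minX[1]);
    return 0;
}
```

Restarting along steepest descent is the standard recovery in nonlinear conjugate-gradient methods: the negative gradient is always a descent direction, so a backtracking search can make progress even when the accumulated conjugate direction has degraded into a poor (or ascent) direction through numerical error.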