
Comparing trunk/OOPSE/libmdtools/ConjugateMinimizer.cpp (file contents):
Revision 1014 by tim, Tue Feb 3 20:47:10 2004 UTC vs.
Revision 1015 by tim, Tue Feb 3 22:54:52 2004 UTC

# Line 1 | Line 1
1   #include "ConjugateMinimizer.hpp"
2 + #include "Utility.hpp"
3 +
4   bool ConjugateMinimizerBase::isSolvable(){
5  
6    //conjugate gradient can only solve unconstrained nonlinear models
7  
8 <  if (!model->hasConstraint())
8 >  if (!model->hasConstraints())
9      return true;
10    else
11      return false;
# Line 19 | Line 21 | void ConjugateMinimizerBase::Minimize(){
21  
22   void ConjugateMinimizerBase::Minimize(){
23    int maxIteration;
24 <  int nextRestIter;
24 >  int nextResetIter;
25    int resetFrq;
26    int nextWriteIter;
27    int writeFrq;
28 +  int lsStatus;
29 +  double gamma;
30 +  double lamda;
31    
32 +  
33    if (!isSolvable()){
34 <    cout << "ConjugateMinimizerBase Error: This nonlinear model can not be solved by " << methodName <<endl;
34 >    cout << "ConjugateMinimizerBase Error: This nonlinear model can not be solved by " << minimizerName <<endl;
35  
36      exit(1);
37    }
# Line 33 | Line 39 | void ConjugateMinimizerBase::Minimize(){
39    printMinizerInfo();
40  
41    resetFrq = paramSet->getResetFrq();
42 <  nextRestIter = resetFrq;
42 >  nextResetIter = resetFrq;
43  
44    writeFrq = paramSet->getWriteFrq();
45    nextWriteIter = writeFrq;
46    
47    prevGrad = model->calcGrad();
48  
49 <  direction = preGrad;
49 >  direction = prevGrad;
50    
51    maxIteration = paramSet->getMaxIteration();
52  
# Line 57 | Line 63 | void ConjugateMinimizerBase::Minimize(){
63      }
64      
65      prevMinX = minX;
66 <    minX = minX + lsMinimizer->getMinVar() * direction;
66 >    lamda = lsMinimizer->getMinVar();
67  
68 +    for(int i = 0; i < direction.size(); i++)
69 +      minX[i] = minX[i] + lamda * direction[i];
70 +
71      //calculate the gradient
72      prevGrad = gradient;
73      
74      gradient = model->calcGrad();
75  
76      // stop if converged
77 <    convStatus = checkConvergence();
69 <    if (convStatus == ){
77 >    if (checkConvergence() > 0){
78        writeOut(minX, currentIter);
79  
80        minStatus = MINSTATUS_CONVERGE;
# Line 75 | Line 83 | void ConjugateMinimizerBase::Minimize(){
83            
84  
85      //calculate the gamma
86 <    gamma = calcGamma(grad, preGrad);
86 >    gamma = calcGamma(gradient, prevGrad);
87  
88      // update new direction
89      prevDirection = direction;
82    direction += gamma * direction;
90  
91 +    for(int i = 0; i < direction.size(); i++)  
92 +      direction[i] += gamma * direction[i];
93 +
94      //
95      if (currentIter == nextWriteIter){
96        nextWriteIter += writeFrq;
# Line 107 | Line 117 | int ConjugateMinimizerBase::checkConvergence(){
117  
118    //test absolute gradient tolerance
119    
120 <  if (norm2(gradient) < paramSet->absGradTol)
120 >  if (sqrt(dot(gradient, gradient)) < paramSet->getGradTol())
121      return 1;
122    else
123      return -1;
# Line 118 | Line 128 | double FRCGMinimizer::calcGamma(vector<double>& newGra
128   }
129  
130   double FRCGMinimizer::calcGamma(vector<double>& newGrad, vector<double>& oldGrad){
131 <  return norm2(newGrad) / norm2(oldGrad);
131 >  return dot(newGrad, newGrad) / dot(oldGrad, newGrad);
132   }
133  
134   double PRCGMinimizer::calcGamma(vector<double>& newGrad, vector<double>& oldGrad){
135    double gamma;
136    vector<double> deltaGrad;
137 +  
138 +  for(int i = 0; i < newGrad.size(); i++)
139 +    deltaGrad.push_back(newGrad[i] - oldGrad[i]);
140  
141 <  deltaGrad = newGrad - oldGrad;
129 <
130 <  return norm(deltaGrad, newGrad) / norm2(oldGrad);
141 >  return dot(deltaGrad, newGrad) / dot(oldGrad, oldGrad);
142    
143   }
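
For reference, the update factors that the two calcGamma() implementations above compute have standard textbook forms. The sketch below is a minimal, self-contained illustration of those forms, together with the usual direction update and the gradient-norm convergence test introduced in revision 1015. The helper names (dotProduct, gammaFR, gammaPR, updateDirection, converged) are illustrative only and do not come from the OOPSE sources; the dot() called in the actual file is assumed to come from the newly included Utility.hpp.

#include <cmath>
#include <cstddef>
#include <vector>

using std::vector;

// Illustrative dot-product helper; the real code appears to rely on a
// similar dot() that is not shown in this diff.
static double dotProduct(const vector<double>& a, const vector<double>& b){
  double s = 0.0;
  for (std::size_t i = 0; i < a.size(); i++)
    s += a[i] * b[i];
  return s;
}

// Textbook Fletcher-Reeves factor: (g_new . g_new) / (g_old . g_old).
static double gammaFR(const vector<double>& newGrad, const vector<double>& oldGrad){
  return dotProduct(newGrad, newGrad) / dotProduct(oldGrad, oldGrad);
}

// Textbook Polak-Ribiere factor: ((g_new - g_old) . g_new) / (g_old . g_old).
static double gammaPR(const vector<double>& newGrad, const vector<double>& oldGrad){
  double num = 0.0;
  for (std::size_t i = 0; i < newGrad.size(); i++)
    num += (newGrad[i] - oldGrad[i]) * newGrad[i];
  return num / dotProduct(oldGrad, oldGrad);
}

// Standard direction update: d <- -g_new + gamma * d_prev.
static void updateDirection(vector<double>& direction,
                            const vector<double>& newGrad, double gamma){
  for (std::size_t i = 0; i < direction.size(); i++)
    direction[i] = -newGrad[i] + gamma * direction[i];
}

// Convergence test matching the new checkConvergence(): stop once the
// gradient 2-norm falls below an absolute tolerance.
static bool converged(const vector<double>& grad, double gradTol){
  return std::sqrt(dotProduct(grad, grad)) < gradTol;
}

Note that in the textbook update the new search direction is built from the current gradient (d = -g_new + gamma * d_prev); whether the leading minus sign applies in ConjugateMinimizer.cpp depends on whether model->calcGrad() returns the gradient or its negative, which this diff alone does not show.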

Diff Legend

Removed lines
+ Added lines
< Changed lines (old revision)
> Changed lines (new revision)