Simbody
3.4 (development)
#ifndef SimTK_SIMMATH_OPTIMIZER_REP_H_
#define SimTK_SIMMATH_OPTIMIZER_REP_H_

/* -------------------------------------------------------------------------- *
 *                        Simbody(tm): SimTKmath                              *
 * -------------------------------------------------------------------------- *
 * This is part of the SimTK biosimulation toolkit originating from           *
 * Simbios, the NIH National Center for Physics-Based Simulation of           *
 * Biological Structures at Stanford, funded under the NIH Roadmap for        *
 * Medical Research, grant U54 GM072970. See https://simtk.org/home/simbody.  *
 *                                                                            *
 * Portions copyright (c) 2006-13 Stanford University and the Authors.        *
 * Authors: Jack Middleton                                                    *
 * Contributors: Michael Sherman                                              *
 *                                                                            *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may    *
 * not use this file except in compliance with the License. You may obtain a  *
 * copy of the License at http://www.apache.org/licenses/LICENSE-2.0.         *
 *                                                                            *
 * Unless required by applicable law or agreed to in writing, software        *
 * distributed under the License is distributed on an "AS IS" BASIS,          *
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.   *
 * See the License for the specific language governing permissions and        *
 * limitations under the License.                                             *
 * -------------------------------------------------------------------------- */

#include "SimTKcommon.h"
#include "simmath/Optimizer.h"
#include "simmath/Differentiator.h"
#include <map>

namespace SimTK {


/* Adapter presenting the objective function to the Differentiator
   (used when a numerical gradient is requested). */
class SysObjectiveFunc : public Differentiator::GradientFunction {
public:
    SysObjectiveFunc(int ny, const OptimizerSystem* sysPtr)
        : Differentiator::GradientFunction(ny) { sysp = sysPtr; }

    // Must provide this pure virtual function.
    int f(const Vector& y, Real& fy) const {
        return sysp->objectiveFunc(y, true, fy);    // calls user's objectiveFunc
    }
    const OptimizerSystem* sysp;
};


/* Adapter presenting the constraint functions to the Differentiator
   (used when a numerical Jacobian is requested). */
class SysConstraintFunc : public Differentiator::JacobianFunction {
public:
    SysConstraintFunc(int nf, int ny, const OptimizerSystem* sysPtr)
        : Differentiator::JacobianFunction(nf,ny) { sysp = sysPtr; }

    // Must provide this pure virtual function.
    int f(const Vector& y, Vector& fy) const {
        return sysp->constraintFunc(y, true, fy);   // calls user's constraintFunc
    }
    const OptimizerSystem* sysp;
};


class SimTK_SIMMATH_EXPORT Optimizer::OptimizerRep {
public:
    virtual ~OptimizerRep();

    OptimizerRep(const OptimizerSystem& sys)
        : sysp(&sys),
          myHandle(0),
          cf(0),
          of(0),
          jacDiff(0),
          gradDiff(0),
          convergenceTolerance(Real(1e-3)),
          constraintTolerance(Real(1e-4)),
          maxIterations(1000),
          limitedMemoryHistory(50),
          diagnosticsLevel(0),
          diffMethod(Differentiator::CentralDifference),
          objectiveEstimatedAccuracy(SignificantReal),
          constraintsEstimatedAccuracy(SignificantReal),
          numericalGradient(false),
          numericalJacobian(false)
    {
    }

    OptimizerRep()
        : sysp(0),
          myHandle(0),
          cf(0),
          of(0),
          jacDiff(0),
          gradDiff(0),
          convergenceTolerance(Real(1e-3)),
          constraintTolerance(Real(1e-4)),
          maxIterations(1000),
          limitedMemoryHistory(50),
          diagnosticsLevel(0),
          diffMethod(Differentiator::CentralDifference),
          objectiveEstimatedAccuracy(SignificantReal),
          constraintsEstimatedAccuracy(SignificantReal),
          numericalGradient(false),
          numericalJacobian(false)
    {
    }

    virtual OptimizerRep* clone() const { return 0; }
    static bool isAvailable() { return true; }

    virtual Real optimize(Vector& results) = 0;

    const OptimizerSystem& getOptimizerSystem() const { return *sysp; }

    void setDiagnosticsLevel(const int level);
    void setConvergenceTolerance(Real accuracy);
    void setConstraintTolerance(Real tolerance);
    void setMaxIterations(const int iter);
    void setLimitedMemoryHistory(const int history);

    bool setAdvancedStrOption (const std::string& option, const std::string& value);
    bool setAdvancedRealOption(const std::string& option, const Real value);
    bool setAdvancedIntOption (const std::string& option, const int value);
    bool setAdvancedBoolOption(const std::string& option, const bool value);

    bool getAdvancedStrOption (const std::string& option, std::string& value) const;
    bool getAdvancedRealOption(const std::string& option, Real& value) const;
    bool getAdvancedIntOption (const std::string& option, int& value) const;
    bool getAdvancedBoolOption(const std::string& option, bool& value) const;

    void setMyHandle(Optimizer& cp) { myHandle = &cp; }
    const Optimizer& getMyHandle() const { assert(myHandle); return *myHandle; }
    void clearMyHandle() { myHandle = 0; }

    void useNumericalGradient(bool flag, Real objEstAccuracy);
    void useNumericalJacobian(bool flag, Real consEstAccuracy);
    void setDifferentiatorMethod(Differentiator::Method method);

    bool isUsingNumericalGradient() const { return numericalGradient; }
    bool isUsingNumericalJacobian() const { return numericalJacobian; }
    Differentiator::Method getDifferentiatorMethod() const { return diffMethod; }
    Real getEstimatedAccuracyOfObjective() const
    {   return objectiveEstimatedAccuracy; }
    Real getEstimatedAccuracyOfConstraints() const
    {   return constraintsEstimatedAccuracy; }

    const Differentiator& getGradientDifferentiator() const {
        assert(gradDiff);
        return *gradDiff;
    }
    const Differentiator& getJacobianDifferentiator() const {
        assert(jacDiff);
        return *jacDiff;
    }

    static int numericalGradient_static(const OptimizerSystem&,
        const Vector& parameters, const bool new_parameters, Vector& gradient);
    static int numericalJacobian_static(const OptimizerSystem&,
        const Vector& parameters, const bool new_parameters, Matrix& jacobian);

protected:
    // These methods are to be called by derived classes as an interface
    // to the OptimizerSystem virtuals. The signature must match that required by
    // IpOpt's matching callbacks. We're using the "user data" argument to pass in
    // the current OptimizerRep, making these behave like non-static members.

    static int objectiveFuncWrapper (int n, const Real* x, int new_x, Real* f, void* rep);
    static int gradientFuncWrapper  (int n, const Real* x, int new_x, Real* gradient, void* rep);
    static int constraintFuncWrapper(int n, const Real* x, int new_x, int m, Real* g, void* rep);
    static int constraintJacobianWrapper(int n, const Real* x, int new_x, int m, int nele_jac,
                                         int* iRow, int* jCol, Real* values, void* rep);
    static int hessianWrapper(int n, const Real* x, int new_x, Real obj_factor,
                              int m, Real* lambda, int new_lambda,
                              int nele_hess, int* iRow, int* jCol,
                              Real* values, void* rep);

    int     diagnosticsLevel;
    Real    convergenceTolerance;
    Real    constraintTolerance;
    int     maxIterations;
    int     limitedMemoryHistory;
    Differentiator::Method diffMethod;
    Real    objectiveEstimatedAccuracy;
    Real    constraintsEstimatedAccuracy;

private:
    const OptimizerSystem* sysp;
    bool numericalGradient;     // true if optimizer will compute a numerical gradient
    bool numericalJacobian;     // true if optimizer will compute a numerical Jacobian
    Differentiator* gradDiff;
    Differentiator* jacDiff;

    SysObjectiveFunc*  of;
    SysConstraintFunc* cf;

    std::map<std::string, std::string> advancedStrOptions;
    std::map<std::string, Real>        advancedRealOptions;
    std::map<std::string, int>         advancedIntOptions;
    std::map<std::string, bool>        advancedBoolOptions;

    friend class Optimizer;
    Optimizer* myHandle;    // The owner handle of this Rep.

}; // end class OptimizerRep

class DefaultOptimizer : public Optimizer::OptimizerRep {
    Real optimize(Vector& results);
    OptimizerRep* clone() const;
};

} // namespace SimTK


#endif // SimTK_SIMMATH_OPTIMIZER_REP_H_
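The protected static wrappers above are the bridge between a C-style solver callback and the OptimizerSystem virtuals: a back end hands the solver library the wrapper plus `this` as the user-data pointer, and the wrapper forwards to the current OptimizerRep. Below is a minimal sketch, not part of the Simbody sources, of how a concrete rep could supply the required optimize() and clone() overrides using only what this header declares. The ExampleOptimizer class and its crude coordinate search are hypothetical stand-ins for a real back end (for example the IpOpt-based rep), which would instead pass objectiveFuncWrapper and friends to its solver library.

#include "simmath/internal/OptimizerRep.h"   // assumed install path for this header
using namespace SimTK;

// Hypothetical concrete back end deriving from Optimizer::OptimizerRep.
class ExampleOptimizer : public Optimizer::OptimizerRep {
public:
    explicit ExampleOptimizer(const OptimizerSystem& sys) : OptimizerRep(sys) {}

    OptimizerRep* clone() const { return new ExampleOptimizer(*this); }

    // 'results' arrives holding the start guess and leaves holding the best
    // point found; the return value is the objective at that point.
    Real optimize(Vector& results) {
        const OptimizerSystem& sys = getOptimizerSystem();

        Real fBest;
        sys.objectiveFunc(results, true, fBest);        // user's objective

        Real step = 0.1;    // arbitrary initial pattern size for this sketch
        for (int iter = 0; iter < maxIterations && step > convergenceTolerance; ++iter) {
            bool improved = false;
            for (int i = 0; i < results.size(); ++i) {
                for (int dir = -1; dir <= 1; dir += 2) {    // try +/- step along axis i
                    Vector trial = results;
                    trial[i] += dir*step;
                    Real fTrial;
                    sys.objectiveFunc(trial, true, fTrial);
                    if (fTrial < fBest) {
                        fBest = fTrial; results = trial; improved = true;
                    }
                }
            }
            if (!improved) step /= 2;   // shrink the pattern when no axis helps
        }
        return fBest;
    }
};

A real back end would also consult the protected settings (constraintTolerance, limitedMemoryHistory, diagnosticsLevel) and, when isUsingNumericalGradient() or isUsingNumericalJacobian() is set, rely on the Differentiator machinery declared above rather than asking the OptimizerSystem for analytic derivatives.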