pele — Python energy landscape explorer
Source listing: source/pele/optimizer.h
00001 #ifndef _PELE_OPTIMIZER_H__
00002 #define _PELE_OPTIMIZER_H__
00003 
#include "base_potential.h"
#include "array.h"

#include <math.h>

#include <algorithm>
#include <iomanip>
#include <iostream>
#include <memory>
#include <stdexcept>
#include <vector>
00012 
00013 namespace pele{
00014 
00019 class Optimizer {
00020 public:
00024     virtual ~Optimizer() {}
00025 
00026     virtual void one_iteration() = 0;
00027 
00032     virtual void run() = 0;
00033 
00038     virtual void run(int const niter) = 0;
00039 
00043     inline virtual Array<double> get_x() const = 0;
00044     inline virtual Array<double> get_g() const = 0;
00045     inline virtual double get_f() const = 0;
00046     inline virtual double get_rms() const = 0;
00047     inline virtual int get_nfev() const = 0;
00048     inline virtual int get_niter() const = 0;
00049     inline virtual bool success() = 0;
00050 };
00051 
00056 class GradientOptimizer : public Optimizer {
00057 protected :
00058     // input parameters
00062     std::shared_ptr<pele::BasePotential> potential_;
00063 
00064     double tol_; 
00065     double maxstep_; 
00067     int maxiter_; 
00068     int iprint_; 
00069     int verbosity_; 
00071     int iter_number_; 
00072     int nfev_; 
00074     // variables representing the state of the system
00075     Array<double> x_; 
00076     double f_; 
00077     Array<double> g_; 
00078     double rms_; 
00088     bool func_initialized_;
00089 
00090 public :
00091     GradientOptimizer(std::shared_ptr<pele::BasePotential> potential,
00092           const pele::Array<double> x0, double tol=1e-4)
00093         : potential_(potential),
00094           tol_(tol),
00095           maxstep_(0.1),
00096           maxiter_(1000),
00097           iprint_(-1),
00098           verbosity_(0),
00099           iter_number_(0),
00100           nfev_(0),
00101           x_(x0.copy()),
00102           f_(0.),
00103           g_(x0.size()),
00104           rms_(1e10),
00105           func_initialized_(false)
00106     {}
00107 
00108     virtual ~GradientOptimizer() {}
00109 
00113     virtual void one_iteration() = 0;
00114 
00119     void run(int const niter)
00120     {
00121         if (! func_initialized_){
00122             // note: this needs to be both here and in one_iteration
00123             initialize_func_gradient();
00124         }
00125 
00126         // iterate until the stop criterion is satisfied or maximum number of
00127         // iterations is reached
00128         for (int i = 0; i < niter; ++i) {
00129             if (stop_criterion_satisfied()) {
00130                 break;
00131             }
00132             one_iteration();
00133         }
00134     }
00135 
00140     void run()
00141     {
00142         run(maxiter_ - iter_number_);
00143     }
00144 
00149     virtual void set_func_gradient(double f, Array<double> grad)
00150     {
00151         if (grad.size() != g_.size()){
00152             throw std::invalid_argument("the gradient has the wrong size");
00153         }
00154         if (iter_number_ > 0){
00155             std::cout << "warning: setting f and grad after the first iteration.  this is dangerous.\n";
00156         }
00157         // copy the function and gradient
00158         f_ = f;
00159         g_.assign(grad);
00160         rms_ = norm(g_) / sqrt(g_.size());
00161         func_initialized_ = true;
00162     }
00163 
00164     inline virtual void reset(pele::Array<double> &x0)
00165     {
00166         throw std::runtime_error("GradientOptimizer::reset must be overloaded");
00167     }
00168 
00169     // functions for setting the parameters
00170     inline void set_tol(double tol) { tol_ = tol; }
00171     inline void set_maxstep(double maxstep) { maxstep_ = maxstep; }
00172     inline void set_max_iter(int max_iter) { maxiter_ = max_iter; }
00173     inline void set_iprint(int iprint) { iprint_ = iprint; }
00174     inline void set_verbosity(int verbosity) { verbosity_ = verbosity; }
00175 
00176 
00177     // functions for accessing the status of the optimizer
00178     inline Array<double> get_x() const { return x_.copy(); } //debug
00179     inline Array<double> get_g() const { return g_.copy(); }
00180     inline double get_f() const { return f_; }
00181     inline double get_rms() const { return rms_; }
00182     inline int get_nfev() const { return nfev_; }
00183     inline int get_niter() const { return iter_number_; }
00184     inline int get_maxiter() const { return maxiter_; }
00185     inline double get_maxstep() { return maxstep_; }
00186     inline double get_tol() const {return tol_;}
00187     inline bool success() { return stop_criterion_satisfied(); }
00188 
00192     virtual bool stop_criterion_satisfied()
00193     {
00194         if (! func_initialized_) initialize_func_gradient();
00195         return rms_ <= tol_;
00196     }
00197 
00198 protected :
00199 
00203     void compute_func_gradient(Array<double> x, double & func,
00204             Array<double> gradient)
00205     {
00206         nfev_ += 1;
00207 
00208         // pass the arrays to the potential
00209         func = potential_->get_energy_gradient(x, gradient);
00210     }
00211 
00215     virtual void initialize_func_gradient()
00216     {
00217         // compute the func and gradient at the current locations
00218         // and store them
00219         compute_func_gradient(x_, f_, g_);
00220         rms_ = norm(g_) / sqrt(x_.size());
00221         func_initialized_ = true;
00222     }
00223 
00224 };
00225 }
00226 
00227 #endif
Doxygen index: All Classes | Namespaces | Functions | Variables | Typedefs