mlpack git-master
adam.hpp
#ifndef MLPACK_CORE_OPTIMIZERS_ADAM_ADAM_HPP
#define MLPACK_CORE_OPTIMIZERS_ADAM_ADAM_HPP

#include <mlpack/prereqs.hpp>

#include <mlpack/core/optimizers/sgd/sgd.hpp>
#include "adam_update.hpp"
#include "adamax_update.hpp"
#include "amsgrad_update.hpp"
#include "nadam_update.hpp"
#include "nadamax_update.hpp"

namespace mlpack {
namespace optimization {
/**
 * Adam is an optimizer that computes individual adaptive learning rates for
 * different parameters from estimates of first and second moments of the
 * gradients.  The UpdateRule template parameter selects the exact variant:
 * AdamUpdate by default, with the AdaMax, AMSGrad, Nadam, and NadaMax update
 * policies included above as alternatives.
 */
template<typename UpdateRule = AdamUpdate>
class AdamType
{
 public:
  /**
   * Construct the Adam optimizer with the given parameters.
   */
  AdamType(const double stepSize = 0.001,
           const size_t batchSize = 32,
           const double beta1 = 0.9,
           const double beta2 = 0.999,
           const double eps = 1e-8,
           const size_t maxIterations = 100000,
           const double tolerance = 1e-5,
           const bool shuffle = true);

  /**
   * Optimize the given function using Adam.  The given iterate is overwritten
   * with the resulting coordinates, and the final objective value is returned.
   */
  template<typename DecomposableFunctionType>
  double Optimize(DecomposableFunctionType& function, arma::mat& iterate)
  {
    return optimizer.Optimize(function, iterate);
  }

  //! Get the step size.
  double StepSize() const { return optimizer.StepSize(); }
  //! Modify the step size.
  double& StepSize() { return optimizer.StepSize(); }

  //! Get the batch size.
  size_t BatchSize() const { return optimizer.BatchSize(); }
  //! Modify the batch size.
  size_t& BatchSize() { return optimizer.BatchSize(); }

  //! Get the smoothing parameter (beta1).
  double Beta1() const { return optimizer.UpdatePolicy().Beta1(); }
  //! Modify the smoothing parameter (beta1).
  double& Beta1() { return optimizer.UpdatePolicy().Beta1(); }

  //! Get the second moment coefficient (beta2).
  double Beta2() const { return optimizer.UpdatePolicy().Beta2(); }
  //! Modify the second moment coefficient (beta2).
  double& Beta2() { return optimizer.UpdatePolicy().Beta2(); }

  //! Get the value used to initialise the mean squared gradient parameter.
  double Epsilon() const { return optimizer.UpdatePolicy().Epsilon(); }
  //! Modify the value used to initialise the mean squared gradient parameter.
  double& Epsilon() { return optimizer.UpdatePolicy().Epsilon(); }

  //! Get the maximum number of iterations (0 indicates no limit).
  size_t MaxIterations() const { return optimizer.MaxIterations(); }
  //! Modify the maximum number of iterations (0 indicates no limit).
  size_t& MaxIterations() { return optimizer.MaxIterations(); }

  //! Get the tolerance for termination.
  double Tolerance() const { return optimizer.Tolerance(); }
  //! Modify the tolerance for termination.
  double& Tolerance() { return optimizer.Tolerance(); }

  //! Get whether or not the individual functions are shuffled.
  bool Shuffle() const { return optimizer.Shuffle(); }
  //! Modify whether or not the individual functions are shuffled.
  bool& Shuffle() { return optimizer.Shuffle(); }

 private:
  //! The underlying SGD optimizer, parameterized by the chosen update rule.
  SGD<UpdateRule> optimizer;
};
// Convenience aliases: one Adam-family optimizer per update rule included
// above.
using Adam = AdamType<AdamUpdate>;

using AdaMax = AdamType<AdaMaxUpdate>;

using AMSGrad = AdamType<AMSGradUpdate>;

using Nadam = AdamType<NadamUpdate>;

using NadaMax = AdamType<NadaMaxUpdate>;

} // namespace optimization
} // namespace mlpack

// Include implementation.
#include "adam_impl.hpp"

#endif
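For context, here is a minimal usage sketch; it is not part of this header, and the ToyFunction class below is invented for illustration. It assumes the separable objective interface that mlpack's SGD-based optimizers expect in this version, i.e. NumFunctions(), Shuffle(), and batched Evaluate()/Gradient() overloads taking a start index and a batch size; check the SGD documentation of your mlpack version for the exact requirements.

#include <mlpack/core.hpp>
#include <mlpack/core/optimizers/adam/adam.hpp>

#include <cmath>
#include <iostream>

// Toy separable objective: f(x) = sum_i (x - t_i)^2, minimized at the mean of
// the targets t_i.
class ToyFunction
{
 public:
  explicit ToyFunction(arma::vec targets) : targets(std::move(targets)) { }

  // Number of separable parts of the objective.
  size_t NumFunctions() const { return targets.n_elem; }

  // Evaluate the parts [begin, begin + batchSize) at the given coordinates.
  double Evaluate(const arma::mat& coordinates,
                  const size_t begin,
                  const size_t batchSize) const
  {
    double objective = 0.0;
    for (size_t i = begin; i < begin + batchSize; ++i)
      objective += std::pow(coordinates(0, 0) - targets(i), 2.0);
    return objective;
  }

  // Gradient of the same batch of parts.
  void Gradient(const arma::mat& coordinates,
                const size_t begin,
                arma::mat& gradient,
                const size_t batchSize) const
  {
    gradient.zeros(coordinates.n_rows, coordinates.n_cols);
    for (size_t i = begin; i < begin + batchSize; ++i)
      gradient(0, 0) += 2.0 * (coordinates(0, 0) - targets(i));
  }

  // Called by the optimizer when shuffle == true; nothing to reorder here.
  void Shuffle() { }

 private:
  arma::vec targets;
};

int main()
{
  ToyFunction f(arma::vec("1.0 2.0 3.0 4.0"));

  // stepSize, batchSize, beta1, beta2, eps, maxIterations, tolerance, shuffle.
  mlpack::optimization::AdamType<> opt(0.05, 2, 0.9, 0.999, 1e-8, 100000,
      1e-9, true);
  opt.MaxIterations() = 50000;  // Parameters can also be tuned afterwards.

  arma::mat coordinates(1, 1, arma::fill::zeros);
  const double objective = opt.Optimize(f, coordinates);

  // coordinates(0, 0) should end up near 2.5, the mean of the targets.
  std::cout << "objective: " << objective
            << ", x: " << coordinates(0, 0) << std::endl;
  return 0;
}

Substituting one of the other update policies included above for the default AdamUpdate template argument selects the corresponding variant of the optimizer.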