diff --git a/include/caffe/solver.hpp b/include/caffe/solver.hpp
index 582aa1427d3..ab12ef1b1bd 100644
--- a/include/caffe/solver.hpp
+++ b/include/caffe/solver.hpp
@@ -217,6 +217,14 @@ class AdaDeltaSolver : public SGDSolver<Dtype> {
   DISABLE_COPY_AND_ASSIGN(AdaDeltaSolver);
 };
 
+/**
+ * @brief AdamSolver, an algorithm for first-order gradient-based optimization
+ *        of stochastic objective functions, based on adaptive estimates of
+ *        lower-order moments. Described in [1].
+ *
+ * [1] D. P. Kingma and J. L. Ba, "ADAM: A Method for Stochastic Optimization."
+ *     arXiv preprint arXiv:1412.6980v8 (2014).
+ */
 template <typename Dtype>
 class AdamSolver : public SGDSolver<Dtype> {
  public:
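
For reference, the update rule this solver class implements is the Adam rule from Kingma & Ba [1]: exponential moving averages of the gradient (first moment) and squared gradient (second moment), with a bias-corrected step size. The snippet below is a minimal standalone C++ sketch of that rule, not Caffe's actual `ComputeUpdateValue` code; the function name `adam_step` and the driver in `main` are illustrative, and the default hyperparameters are the values suggested in the paper.

```cpp
#include <cmath>
#include <cstdio>
#include <vector>

// Illustrative sketch of one Adam step (per Kingma & Ba [1]):
//   m_t     = beta1 * m_{t-1} + (1 - beta1) * g_t
//   v_t     = beta2 * v_{t-1} + (1 - beta2) * g_t^2
//   theta_t = theta_{t-1} - alpha * sqrt(1 - beta2^t) / (1 - beta1^t)
//                         * m_t / (sqrt(v_t) + eps)
void adam_step(std::vector<float>& theta, const std::vector<float>& grad,
               std::vector<float>& m, std::vector<float>& v, int t,
               float alpha = 0.001f, float beta1 = 0.9f, float beta2 = 0.999f,
               float eps = 1e-8f) {
  // Bias correction folded into the effective step size, as in the paper.
  const float correction =
      std::sqrt(1.0f - std::pow(beta2, t)) / (1.0f - std::pow(beta1, t));
  for (std::size_t i = 0; i < theta.size(); ++i) {
    m[i] = beta1 * m[i] + (1.0f - beta1) * grad[i];             // 1st moment
    v[i] = beta2 * v[i] + (1.0f - beta2) * grad[i] * grad[i];   // 2nd moment
    theta[i] -= alpha * correction * m[i] / (std::sqrt(v[i]) + eps);
  }
}

int main() {
  // Toy problem: minimize f(x) = x^2, whose gradient is 2x.
  std::vector<float> theta = {5.0f}, m = {0.0f}, v = {0.0f};
  for (int t = 1; t <= 2000; ++t) {
    std::vector<float> grad = {2.0f * theta[0]};
    adam_step(theta, grad, m, v, t);
  }
  std::printf("x after 2000 Adam steps: %f\n", theta[0]);  // approaches 0
  return 0;
}
```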