/**
 * DESCRIPTION OF THE FILE
 *
 * @author Michal Kravčenko
 * @date 30.7.18 -
 */
#ifndef INC_4NEURO_GRADIENTDESCENT_H
#define INC_4NEURO_GRADIENTDESCENT_H
#include <vector>

#include "../constants.h"
#include "ILearningMethods.h"
#include "../ErrorFunction/ErrorFunctions.h"
namespace lib4neuro {

    /**
     * Gradient-descent optimizer (i.e. back-propagation) for lib4neuro
     * error functions.
     */
    class GradientDescent : public ILearningMethods {

    private:

        /**
         * Threshold for the successful ending of the optimization - deviation from minima
         */
        double max_error;

        /**
         * Number of iterations to reset step size to tolerance/10.0
         */
        // NOTE(review): this declaration was lost in the damaged revision; the name is
        // reconstructed from the constructor parameter 'n_to_restart' — verify against the .cpp
        size_t restart_frequency;

        /**
         * Maximal number of iterations - optimization will stop after that, even if not converged
         */
        long long int maximum_niters;

        /**
         * Size of the mini-batch used in one iteration (0 = use the whole data set)
         */
        // NOTE(review): reconstructed from the constructor parameter 'batch' — verify against the .cpp
        size_t batch;

        /**
         * Vector of minima coordinates
         */
        std::vector<double> *optimal_parameters;

        /**
         * Computes a new step size for the next gradient-descent iteration.
         *
         * @param gamma          step size (learning rate); updated in place
         * @param beta           weighting coefficient (read-only)
         * @param c              auxiliary coefficient; updated in place
         * @param grad_norm_prev gradient norm from the previous iteration
         * @param grad_norm      gradient norm from the current iteration
         * @param fi             presumably the current error-function value — verify against the .cpp
         * @param fim            presumably the previous error-function value — verify against the .cpp
         */
        virtual void
        eval_step_size_mk(double &gamma,
                          double beta,
                          double &c,
                          double grad_norm_prev,
                          double grad_norm,
                          double fi,
                          double fim);

    public:

        /**
         * Creates an instance of Gradient Descent Optimizer (i.e. back-propagation)
         * @param epsilon Threshold for the successful ending of the optimization - deviation from minima
         * @param n_to_restart Number of iterations to reset step size to tolerance/10.0
         * @param max_iters Maximal number of iterations - optimization will stop after that, even if not converged
         * @param batch Size of the mini-batch; 0 means the whole data set is used in every iteration
         */
        LIB4NEURO_API GradientDescent(double epsilon = 1e-3, size_t n_to_restart = 100, int max_iters = 1000, size_t batch = 0);

        /**
         * Deallocates the instance
         */
        LIB4NEURO_API ~GradientDescent();

        /**
         * Minimizes the supplied error function via gradient descent.
         * @param ef error function to be minimized
         */
        LIB4NEURO_API virtual void optimize(lib4neuro::ErrorFunction &ef);

        /**
         * Returns the parameters found by the optimization.
         * @return pointer to the vector of minima coordinates
         */
        LIB4NEURO_API virtual std::vector<double> *get_parameters();
    };

}
#endif //INC_4NEURO_GRADIENTDESCENT_H