Commit cee99b51 authored by Martin Beseda

FIX: Output rewritten properly using macros from message.h.

parent fa957f48
@@ -6,6 +6,7 @@
  */
 #include "GradientDescent.h"
+#include "message.h"
 namespace lib4neuro {
     GradientDescent::GradientDescent(double epsilon, size_t n_to_restart, int max_iters, size_t batch) {
@@ -49,10 +50,8 @@ namespace lib4neuro {
                     ef.get_dataset()->get_normalization_strategy());
         }
-        std::cout << "Finding a solution via a Gradient Descent method with adaptive step-length" << std::endl;
-        std::cout
-                << "********************************************************************************************************************************************"
-                << std::endl;
+        COUT_INFO("Finding a solution via a Gradient Descent method with adaptive step-length..." << std::endl);
         double grad_norm = this->tolerance * 10.0, gamma, sx, beta;
         double grad_norm_prev;
         size_t i;
@@ -131,16 +130,18 @@ namespace lib4neuro {
             params_current = ptr_mem;
             val = ef.eval(params_current);
-            if (iter_counter % 1 == 0) {
-                printf("Iteration %12d. Step size: %15.8f, C: %15.8f, Gradient norm: %15.8f. Total error: %10.8f\r",
-                       (int) iter_counter, gamma, c, grad_norm, val);
-                std::cout.flush();
-            }
+            COUT_DEBUG(std::string("Iteration: ") << (unsigned int)(iter_counter)
+                       << ". Step size: " << gamma
+                       << ". C: " << c
+                       << ". Gradient norm: " << grad_norm
+                       << ". Total error: " << val
+                       << "." << std::endl);
         }
-        printf("Iteration %12d. Step size: %15.8f, C: %15.8f, Gradient norm: %15.8f. Total error: %10.8f\n",
-               (int) iter_counter, gamma, c, grad_norm, val);
-        std::cout.flush();
+        if(iter_idx == 0) {
+            COUT_INFO("Maximum number of iterations (" << this->maximum_niters << ") was reached!" << std::endl);
+        }
         *this->optimal_parameters = *params_current;
...
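
Note on the macros used above: message.h supplies the COUT_INFO and COUT_DEBUG logging macros, but their definitions are not part of this diff. The sketch below is a hypothetical, minimal version of how such stream-style macros could be written so that the call sites above expand to ordinary std::cout insertion chains; only the macro names are taken from the diff, everything else (the "INFO:"/"DEBUG:" prefixes, the DEBUG guard) is an assumption.

    // Hypothetical sketch, not the actual lib4neuro message.h.
    #include <iostream>

    // Informational messages shown to the user unconditionally.
    #define COUT_INFO(text) { std::cout << "INFO: " << text << std::flush; }

    // Per-iteration progress messages; assumed to be compiled in only for debug builds.
    #ifdef DEBUG
    #define COUT_DEBUG(text) { std::cout << "DEBUG: " << text << std::flush; }
    #else
    #define COUT_DEBUG(text) {}
    #endif

With macros of this shape, a call such as COUT_DEBUG(std::string("Iteration: ") << iter_counter << ...) expands to a single std::cout statement, and the per-iteration progress output can be stripped from release builds without touching the optimizer code itself.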