From 5c7df30d8ee52fa5f45be65ddf742b281ca3eeab Mon Sep 17 00:00:00 2001
From: Martin Beseda <martin.beseda@vsb.cz>
Date: Sun, 6 Jan 2019 09:38:16 +0100
Subject: [PATCH] FIX: Output rewritten properly using macros from message.h.

---
 src/LearningMethods/GradientDescent.cpp | 25 +++++++++++++------------
 1 file changed, 13 insertions(+), 12 deletions(-)

diff --git a/src/LearningMethods/GradientDescent.cpp b/src/LearningMethods/GradientDescent.cpp
index a457d0cf..b8250acc 100644
--- a/src/LearningMethods/GradientDescent.cpp
+++ b/src/LearningMethods/GradientDescent.cpp
@@ -6,6 +6,7 @@
  */
 
 #include "GradientDescent.h"
+#include "message.h"
 
 namespace lib4neuro {
     GradientDescent::GradientDescent(double epsilon, size_t n_to_restart, int max_iters, size_t batch) {
@@ -49,10 +50,8 @@ namespace lib4neuro {
                                                 ef.get_dataset()->get_normalization_strategy());
         }
 
-        std::cout << "Finding a solution via a Gradient Descent method with adaptive step-length" << std::endl;
-        std::cout
-                << "********************************************************************************************************************************************"
-                << std::endl;
+        COUT_INFO("Finding a solution via a Gradient Descent method with adaptive step-length..." << std::endl);
+
         double grad_norm = this->tolerance * 10.0, gamma, sx, beta;
         double grad_norm_prev;
         size_t i;
@@ -131,16 +130,18 @@ namespace lib4neuro {
             params_current = ptr_mem;
 
             val = ef.eval(params_current);
-            if (iter_counter % 1 == 0) {
-                printf("Iteration %12d. Step size: %15.8f, C: %15.8f, Gradient norm: %15.8f. Total error: %10.8f\r",
-                       (int) iter_counter, gamma, c, grad_norm, val);
-                std::cout.flush();
-            }
+
+            COUT_DEBUG(std::string("Iteration: ") << (unsigned int)(iter_counter)
+                       << ". Step size: " << gamma
+                       << ". C: " << c
+                       << ". Gradient norm: " << grad_norm
+                       << ". Total error: " << val
+                       << "." << std::endl);
         }
 
-        printf("Iteration %12d. Step size: %15.8f, C: %15.8f, Gradient norm: %15.8f. Total error: %10.8f\n",
-               (int) iter_counter, gamma, c, grad_norm, val);
-        std::cout.flush();
+        if(iter_idx == 0) {
+            COUT_INFO("Maximum number of iterations (" << this->maximum_niters << ") was reached!" << std::endl);
+        }
 
         *this->optimal_parameters = *params_current;
 
-- 
GitLab
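
Note: the patch relies on the COUT_INFO and COUT_DEBUG macros from message.h, but message.h itself is not part of this diff. The sketch below is only an assumption about how such macros could be shaped (the macro bodies, the log-level prefixes, and the main() driver are invented for illustration, not the actual lib4neuro implementation); its purpose is to show why passing a whole stream-insertion chain as the macro argument, as the new hunks do, compiles.

// Hypothetical sketch only; the real message.h may log elsewhere or compile
// COUT_DEBUG out of release builds.
#include <iostream>
#include <string>

// The macro argument is a full stream-insertion chain, so expanding it inline
// after a std::cout prefix lets the whole chain evaluate left-to-right on
// std::cout, e.g. COUT_DEBUG(std::string("Iteration: ") << i << std::endl);
#define COUT_INFO(msg)  do { std::cout << "INFO:  " << msg; } while (0)
#define COUT_DEBUG(msg) do { std::cout << "DEBUG: " << msg; } while (0)

int main() {
    unsigned int iter_counter = 42;
    double gamma = 0.01, grad_norm = 1.5, val = 0.003;

    COUT_INFO("Finding a solution via a Gradient Descent method with adaptive step-length..." << std::endl);
    COUT_DEBUG(std::string("Iteration: ") << iter_counter
               << ". Step size: " << gamma
               << ". Gradient norm: " << grad_norm
               << ". Total error: " << val
               << "." << std::endl);
    return 0;
}

A macro of this shape also shows why the std::string("Iteration: ") operand in the patched COUT_DEBUG call is valid: after expansion it is just another right-hand operand in the std::cout chain rather than the left-hand side of operator<<.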