From c7b8fe29a514b8ecc087ddf509500c7b4fd9fb74 Mon Sep 17 00:00:00 2001
From: Martin Beseda <martin.beseda@vsb.cz>
Date: Mon, 21 Jan 2019 18:12:28 +0100
Subject: [PATCH] FIX: Fixed batch being passed as a parameter instead of
 alpha.

---
 src/LearningMethods/GradientDescent.cpp | 34 ++++++++++++-------------
 1 file changed, 17 insertions(+), 17 deletions(-)

diff --git a/src/LearningMethods/GradientDescent.cpp b/src/LearningMethods/GradientDescent.cpp
index 1e837e0c..27e7808b 100644
--- a/src/LearningMethods/GradientDescent.cpp
+++ b/src/LearningMethods/GradientDescent.cpp
@@ -89,7 +89,7 @@ namespace lib4neuro {
             /* reset of the current gradient */
             std::fill(gradient_current->begin(), gradient_current->end(), 0.0);
 //        std::fill(gradient_mem.begin(), gradient_mem.end(), 0.0);
-            ef.calculate_error_gradient(*params_current, *gradient_current, this->batch);
+            ef.calculate_error_gradient(*params_current, *gradient_current, 1.0, this->batch);
 //        double error_analytical = this->calculate_gradient( ef.get_dataset()->get_data(), (size_t)2, params_current, gradient_current );
 
 //        for(size_t k = 0; k < gradient_mem.size(); ++k){
@@ -143,18 +143,18 @@ namespace lib4neuro {
             val = ef.eval(params_current);
 
             COUT_DEBUG(std::string("Iteration: ") << (unsigned int)(iter_counter)
-                       << ". Step size: " << gamma
-                       << ". C: " << c
-                       << ". Gradient norm: " << grad_norm
-                       << ". Total error: " << val
-                       << "." << "\r");
+                                                  << ". Step size: " << gamma
+                                                  << ". C: " << c
+                                                  << ". Gradient norm: " << grad_norm
+                                                  << ". Total error: " << val
+                                                  << "." << std::endl );
 
             WRITE_TO_OFS_DEBUG(ofs, "Iteration: " << (unsigned int)(iter_counter)
-                                    << ". Step size: " << gamma
-                                    << ". C: " << c
-                                    << ". Gradient norm: " << grad_norm
-                                    << ". Total error: " << val
-                                    << "." << std::endl);
+                                                  << ". Step size: " << gamma
+                                                  << ". C: " << c
+                                                  << ". Gradient norm: " << grad_norm
+                                                  << ". Total error: " << val
+                                                  << "." << std::endl);
 
 
         }
@@ -175,15 +175,15 @@ namespace lib4neuro {
 
         } else {
             COUT_INFO("Gradient Descent method converged after "
-                      << this->maximum_niters-iter_idx
-                      << " iterations."
-                      << std::endl);
+                              << this->maximum_niters-iter_idx
+                              << " iterations."
+                              << std::endl);
 #ifdef L4N_DEBUG
             if(ofs && ofs->is_open()) {
                 *ofs << "Gradient Descent method converged after "
-                      << this->maximum_niters-iter_idx
-                      << " iterations."
-                      << std::endl;
+                     << this->maximum_niters-iter_idx
+                     << " iterations."
+                     << std::endl;
             }
 #endif
         }
-- 
GitLab