Skip to content
Snippets Groups Projects
Commit cc537040 authored by Martin Beseda's avatar Martin Beseda
Browse files

FIX: Added missing parameter (batch size) into Gradient Descent method.

parent 0ffba70e
No related branches found
No related tags found
No related merge requests found
......@@ -89,7 +89,7 @@ namespace lib4neuro {
/* reset of the current gradient */
std::fill(gradient_current->begin(), gradient_current->end(), 0.0);
// std::fill(gradient_mem.begin(), gradient_mem.end(), 0.0);
ef.calculate_error_gradient(*params_current, *gradient_current);
ef.calculate_error_gradient(*params_current, *gradient_current, this->batch);
// double error_analytical = this->calculate_gradient( ef.get_dataset()->get_data(), (size_t)2, params_current, gradient_current );
// for(size_t k = 0; k < gradient_mem.size(); ++k){
......@@ -163,13 +163,13 @@ namespace lib4neuro {
} else {
COUT_INFO("Gradient Descent method converged after "
<< this->maximum_niters-iter_idx
<< "iterations."
<< " iterations."
<< std::endl);
#ifdef L4N_DEBUG
if(ofs && ofs->is_open()) {
*ofs << "Gradient Descent method converged after "
<< this->maximum_niters-iter_idx
<< "iterations."
<< " iterations."
<< std::endl;
}
#endif
......
0% Loading — or an error occurred while loading the diff.
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment