Skip to content
Snippets Groups Projects
Commit c7b8fe29 authored by Martin Beseda's avatar Martin Beseda
Browse files

FIX: Fixed `batch` being passed in place of the `alpha` parameter in `calculate_error_gradient()`; alpha is now passed explicitly as 1.0.

parent a9dc7d8f
No related branches found
No related tags found
No related merge requests found
......@@ -89,7 +89,7 @@ namespace lib4neuro {
/* reset of the current gradient */
std::fill(gradient_current->begin(), gradient_current->end(), 0.0);
// std::fill(gradient_mem.begin(), gradient_mem.end(), 0.0);
ef.calculate_error_gradient(*params_current, *gradient_current, this->batch);
ef.calculate_error_gradient(*params_current, *gradient_current, 1.0, this->batch);
// double error_analytical = this->calculate_gradient( ef.get_dataset()->get_data(), (size_t)2, params_current, gradient_current );
// for(size_t k = 0; k < gradient_mem.size(); ++k){
......@@ -143,18 +143,18 @@ namespace lib4neuro {
val = ef.eval(params_current);
COUT_DEBUG(std::string("Iteration: ") << (unsigned int)(iter_counter)
<< ". Step size: " << gamma
<< ". C: " << c
<< ". Gradient norm: " << grad_norm
<< ". Total error: " << val
<< "." << "\r");
<< ". Step size: " << gamma
<< ". C: " << c
<< ". Gradient norm: " << grad_norm
<< ". Total error: " << val
<< "." << std::endl );
WRITE_TO_OFS_DEBUG(ofs, "Iteration: " << (unsigned int)(iter_counter)
<< ". Step size: " << gamma
<< ". C: " << c
<< ". Gradient norm: " << grad_norm
<< ". Total error: " << val
<< "." << std::endl);
<< ". Step size: " << gamma
<< ". C: " << c
<< ". Gradient norm: " << grad_norm
<< ". Total error: " << val
<< "." << std::endl);
}
......@@ -175,15 +175,15 @@ namespace lib4neuro {
} else {
COUT_INFO("Gradient Descent method converged after "
<< this->maximum_niters-iter_idx
<< " iterations."
<< std::endl);
<< this->maximum_niters-iter_idx
<< "iterations."
<< std::endl);
#ifdef L4N_DEBUG
if(ofs && ofs->is_open()) {
*ofs << "Gradient Descent method converged after "
<< this->maximum_niters-iter_idx
<< " iterations."
<< std::endl;
<< this->maximum_niters-iter_idx
<< "iterations."
<< std::endl;
}
#endif
}
......
Loading…
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment