Newer
Older

Michal Kravcenko
committed
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
/**
* DESCRIPTION OF THE FILE
*
* @author Michal Kravčenko
* @date 19.2.19 -
*/
#include "LearningSequence.h"
namespace lib4neuro {
/**
 * Constructs an empty learning sequence.
 *
 * @param tolerance     target error value; optimization stops once the error
 *                      drops to this value or below
 * @param max_n_cycles  maximum number of passes over the whole method
 *                      sequence (a negative value means "no limit", since the
 *                      counter is compared against zero with !=)
 */
LearningSequence::LearningSequence( double tolerance, int max_n_cycles )
    : tol( tolerance ),
      max_number_of_cycles( max_n_cycles ),
      best_parameters( new std::vector<double>() ) {
}
/**
 * Destructor. Releases the best-parameter buffer allocated with `new` in the
 * constructor; the original empty destructor leaked it.
 * (The ILearningMethods pointers in `learning_sequence` are not owned by this
 * object and are intentionally not freed here.)
 */
LearningSequence::~LearningSequence() {
    delete this->best_parameters;
    this->best_parameters = nullptr;
}
/**
 * Returns the parameter vector of the first learning method in the sequence,
 * or nullptr when no method has been added yet.
 */
std::vector<double>* LearningSequence::get_parameters() {
    /* Guard clause: nothing to delegate to. */
    if( this->learning_sequence.empty() ){
        return nullptr;
    }
    return this->learning_sequence[0]->get_parameters( );
}
/**
 * Appends a learning method to the end of the sequence. The pointer is stored
 * as-is; ownership stays with the caller.
 *
 * @param method optimizer to run as the next stage of each cycle
 */
void LearningSequence::add_learning_method(ILearningMethods *method) {
    this->learning_sequence.emplace_back( method );
}
/**
 * Runs every learning method in the sequence repeatedly (up to
 * `max_number_of_cycles` cycles) until the error function drops to `tol`.
 *
 * Tracks the lowest error seen across all methods and keeps a snapshot of the
 * corresponding parameters in `best_parameters`.
 *
 * Fix over the original: when the cycle budget runs out WITHOUT reaching the
 * tolerance, the original returned with the network holding whatever the last
 * method produced — possibly worse than the best snapshot. We now restore the
 * best-seen parameters in that case as well. The restore is guarded by an
 * `improved` flag so a stale or empty `best_parameters` buffer from a previous
 * call is never written into the network.
 *
 * @param ef  error function to minimize; also provides access to the network
 * @param ofs optional output stream forwarded to each learning method
 */
void LearningSequence::optimize(lib4neuro::ErrorFunction &ef, std::ofstream *ofs) {
    double error = ef.eval();
    double the_best_error = error;
    bool improved = false;
    /* A negative max_number_of_cycles never reaches 0 => unlimited cycles. */
    int mcycles = this->max_number_of_cycles, cycle_idx = 0;

    while( error > this->tol && mcycles != 0 ){
        mcycles--;
        cycle_idx++;

        for( auto m: this->learning_sequence ){
            m->optimize( ef, ofs );
            error = ef.eval();

            /* Snapshot the parameters whenever a new best error is reached. */
            if( error < the_best_error ){
                the_best_error = error;
                improved = true;
                *this->best_parameters = *ef.get_parameters();
            }

            /* Tolerance reached: push the best parameters back and stop. */
            if( error <= this->tol ){
                ef.get_network_instance()->copy_parameter_space( this->best_parameters );
                return;
            }
        }
        COUT_DEBUG( "Cycle: " << cycle_idx << ", the lowest error: " << the_best_error << std::endl );
    }

    /* Budget exhausted: make sure the network ends up with the best
     * parameters seen, not merely the last method's output. */
    if( improved ){
        ef.get_network_instance()->copy_parameter_space( this->best_parameters );
    }
}
}