//
// Created by martin on 25.11.18.
//
/**
 * This file serves for testing various examples, have fun!
 *
 * @author Michal Kravčenko
 * @date 14.6.18 -
 */
#include <iostream>
#include <cstdio>
#include <fstream>
#include <vector>
#include <utility>
#include <algorithm>
#include <cassert>
#include "4neuro.h"
#include "../CrossValidator/CrossValidator.h"
/* Returns the squared relative error ||d1 - d2||^2 / ||d1||^2 of two vectors */
double get_rel_error(std::vector<double> &d1, std::vector<double> &d2) {
    assert(d1.size() == d2.size());

    double out = 0, m, n = 0;
    for (size_t i = 0; i < d1.size(); ++i) {
        m = d1[i] - d2[i];
        n += d1[i] * d1[i];
        out += m * m;
    }

    /* Return the accumulated sum of squared differences, not only the last one */
    return out / n;
}
int main(int argc, char** argv) {
    try {
//        l4n::CSVReader reader("/home/martin/Desktop/ANN_DATA_1_SET.txt", "\t", true);
        l4n::CSVReader reader("/tmp/data_Heaviside.txt", "\t", false);
        /* Create data set for both the training and testing of the neural network */
        std::vector<unsigned int> inputs  = { 0 };
        std::vector<unsigned int> outputs = { 1 };
        l4n::DataSet ds = reader.get_data_set(&inputs, &outputs);
        ds.normalize();
//        ds.print_data();
        /* Neural network construction */
        std::vector<unsigned int> neuron_numbers_in_layers = { 1, 10, 10, 1 };
        l4n::FullyConnectedFFN nn(&neuron_numbers_in_layers, l4n::NEURON_TYPE::LOGISTIC);
        /* Error function */
        l4n::MSE mse(&nn, &ds);
        /* Domain */
        std::vector<double> domain_bounds(2 * (nn.get_n_weights() + nn.get_n_biases()));

        /* Training method */
//        for(size_t i = 0; i < domain_bounds.size() / 2; ++i){
//            domain_bounds[2 * i]     = -10;
//            domain_bounds[2 * i + 1] =  10;
//        }
//        l4n::ParticleSwarm ps(&domain_bounds,
//                              1.711897,
//                              1.711897,
//                              0.711897,
//                              0.5,
//                              20,
//                              0.7,
//                              600,
//                              1000);
        l4n::GradientDescent gs(1e-3, 100, 100000);

        nn.randomize_weights();

        /* Cross-validation */
        l4n::CrossValidator cv(&gs, &mse);
        cv.run_k_fold_test(10, 1);

        /* Save the network to a file */
        nn.save_text("test_net.4n");

        /* Check of the saved network */
        std::cout << std::endl << "The original network info:" << std::endl;
        nn.print_stats();
        nn.print_weights();

        l4n::NeuralNetwork nn_loaded("test_net.4n");
        std::cout << std::endl << "The loaded network info:" << std::endl;
        nn_loaded.print_stats();
        nn_loaded.print_weights();

        /* Example of evaluation of a single input: normalized input, de-normalized output */
        std::vector<double> input_norm(ds.get_input_dim()),
                            input(ds.get_input_dim()),
                            output_norm(ds.get_output_dim()),
                            expected_output_norm(ds.get_output_dim()),
                            output(ds.get_output_dim());

        size_t data_idx = 0;
        ds.get_input(input_norm, data_idx);
        ds.get_output(expected_output_norm, data_idx);

        nn_loaded.eval_single(input_norm, output_norm);
        ds.de_normalize_single(output_norm, output);
        ds.de_normalize_single(input_norm, input);

        /* Print the de-normalized input and output */
        std::cout << std::endl << "input: ";
        for (auto el : input) { std::cout << el << ", "; }
        std::cout << std::endl;

        std::cout << "output: ";
        for (auto el : output) { std::cout << el << ", "; }
        std::cout << std::endl;

        std::cout << "error of the " << data_idx << "-th element: "
                  << get_rel_error(output_norm, expected_output_norm) << std::endl;

        return 0;
    } catch(const std::runtime_error& e) {
        std::cerr << e.what() << std::endl;
        exit(EXIT_FAILURE);
    } catch(const std::out_of_range& e) {
        std::cerr << e.what() << std::endl;
        exit(EXIT_FAILURE);
    } catch(const std::invalid_argument& e) {
        std::cerr << e.what() << std::endl;
        exit(EXIT_FAILURE);