/**
 * This file serves for testing of various examples, have fun!
 *
 * @author Michal Kravčenko
 * @date 14.6.18 -
 */

#include <iostream>
#include <cstdio>
#include <fstream>
#include <vector>
#include <utility>
#include <cmath>
#include <cassert>

#include "4neuro.h"
#include "../CrossValidator/CrossValidator.h"

/* Relative L2 error ||d1 - d2|| / ||d1|| between two vectors of equal size */
double get_rel_error(std::vector<double> &d1, std::vector<double> &d2) {
    double out = 0, m, n = 0;

    assert(d1.size() == d2.size());

    for(size_t i = 0; i < d1.size(); ++i) {
        m = d1[i] - d2[i];
        n += d1[i] * d1[i];
        out += m * m;
    }

    return std::sqrt(out / n);
}

int main(int argc, char** argv){

    /* Read data from the file */
    l4n::CSVReader reader("/home/martin/Desktop/ANN_DATA_1_SET.txt", "\t", true);
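    /* NOTE: the path above is machine-specific; point it at your own copy of the data file */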

    /* Create data set for both the training and testing of the neural network */
    std::vector<unsigned int> inputs = {2, 3, 4, 5, 6, 7, 8, 26, 27, 28};
    std::vector<unsigned int> outputs = {17, 18, 19, 20, 21, 22, 23, 24, 25};
    l4n::DataSet ds = reader.get_data_set(&inputs, &outputs);
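    /* The index vectors above select which columns of the data file serve as the 10 network
     * inputs and the 9 expected outputs */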

    /* Normalize data in the set for easier training of the network */
    ds.normalize();
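    /* From this point on the network works with normalized values; results are mapped back
     * to the original scale by de_normalize_single at the end of this example */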

    std::vector<unsigned int> neuron_numbers_in_layers = {10, 10, 10, 9};
    l4n::FullyConnectedFFN nn(&neuron_numbers_in_layers, l4n::NEURON_TYPE::LOGISTIC);
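    /* A fully connected feed-forward network with layer sizes 10-10-10-9 and logistic
     * activations; the first and last layers match the 10 input and 9 output columns above */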

    /* Error function */
    l4n::MSE mse(&nn, &ds);

    /* Domain */
    std::vector<double> domain_bounds(2 * (nn.get_n_weights() + nn.get_n_biases()));
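    /* domain_bounds holds a lower and an upper bound for every weight and bias; it is only
     * used by the (commented-out) particle swarm optimizer below */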

    /* Training method */
//    for(size_t i = 0; i < domain_bounds.size() / 2; ++i) {
//        domain_bounds[2 * i]     = -10;
//        domain_bounds[2 * i + 1] = 10;
//    }
//    l4n::ParticleSwarm ps(&domain_bounds,
//                          1.711897,
//                          1.711897,
//                          0.711897,
//                          0.5,
//                          20,
//                          0.7,
//                          600,
//                          1000);
    l4n::GradientDescent gs(1e-3, 100, 100000);
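    /* The gradient-descent optimizer above is used instead of the particle swarm and is
     * handed to the cross-validator below as the training method */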

    nn.randomize_weights();

    /* Cross-validation */
    l4n::CrossValidator cv(&gs, &mse);
    cv.run_k_fold_test(10, 1);
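    /* run_k_fold_test presumably splits the data into 10 folds, repeatedly training on nine
     * of them with the gradient-descent method and evaluating the MSE on the remaining one */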

    /* Save the network to a file */
    nn.save_text("test_net.4n");

    /* Check of the saved network */
    std::cout << std::endl << "The original network info:" << std::endl;
    nn.print_stats();
    nn.print_weights();
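
    /* Reload the network from the saved file so that its stats and weights can be compared
     * with the original network printed above */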
    l4n::NeuralNetwork nn_loaded("test_net.4n");
    std::cout << std::endl << "The loaded network info:" << std::endl;
    nn_loaded.print_stats();
    nn_loaded.print_weights();

    /* Example of evaluation of a single input: the network receives the normalized input
     * and both the input and the output are de-normalized for printing */
    std::vector<double> input_norm(ds.get_input_dim()),
                        input(ds.get_input_dim()),
                        output_norm(ds.get_output_dim()),
                        expected_output_norm(ds.get_output_dim()),
                        output(ds.get_output_dim());
    size_t data_idx = 0;
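
    /* Take the data_idx-th (here the first) element of the normalized data set, evaluate the
     * loaded network on it and map the input and the network output back to the original scale */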
    ds.get_input(input_norm, data_idx);
    ds.get_output(expected_output_norm, data_idx);

    nn_loaded.eval_single(input_norm, output_norm);
    ds.de_normalize_single(output_norm, output);
    ds.de_normalize_single(input_norm, input);

    std::cout << std::endl << "input: ";
    for(auto el: input) { std::cout << el << ", "; }
    std::cout << std::endl;

    std::cout << "output: ";
    for(auto el: output) { std::cout << el << ", "; }
    std::cout << std::endl;
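
    /* The relative error is evaluated on the normalized vectors, which share a common scale */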
    std::cout << "error of the " << data_idx << "-th element: "
              << get_rel_error(output_norm, expected_output_norm) << std::endl;

    return 0;
}