//
// Created by martin on 25.11.18.
//
/**
* This file serves for testing various examples, have fun!
*
* @author Michal Kravčenko
* @date 14.6.18 -
*/
#include <iostream>
#include <cstdio>
#include <fstream>
#include <vector>
#include <utility>
#include <algorithm>
#include <cassert>
#include <cstdlib>
#include <string>
#include "4neuro.h"
#include "../CrossValidator/CrossValidator.h"
int main(int argc, char** argv) {
try {
l4n::CSVReader reader("/home/martin/Desktop/data_Heaviside.txt", "\t", true); // File, separator, skip 1st line
reader.read(); // Read from the file
/* Open file for writing */
std::string filename = "simulator_output.txt";
std::ofstream output_file(filename);
if(!output_file.is_open()) {
throw std::runtime_error("File '" + filename + "' can't be opened!");
}
/* Create data set for both the training and testing of the neural network */
std::vector<unsigned int> inputs = { 3 }; // Multiple input columns are possible, e.g. {0, 3}
std::vector<unsigned int> outputs = { 1 }; // Multiple output columns are possible, e.g. {1, 2}
l4n::DataSet ds = reader.get_data_set(&inputs, &outputs); // Creation of data-set for NN
ds.normalize(); // Normalization of data to prevent numerical problems
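// The inverse transformation is applied later through ds.de_normalize_single(), when individual
// predictions are inspected at the end of this example.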
// ds.print_data(); // Printing of data-set to check it
/* Neural network construction */
// Numbers of neurons in layers (including input and output layers)
std::vector<unsigned int> neuron_numbers_in_layers = {1, 10, 10, 1};
// Creation of a fully connected feed-forward network with linear activation functions in the input and output
// layers and the specified activation function in the hidden ones
l4n::FullyConnectedFFN nn(&neuron_numbers_in_layers, l4n::NEURON_TYPE::LOGISTIC);
l4n::MSE mse(&nn, &ds); // First parameter - neural network, second parameter - data-set
/* Domain - important for Particle Swarm method */
std::vector<double> domain_bounds(2 * (nn.get_n_weights() + nn.get_n_biases()));
for(size_t i = 0; i < domain_bounds.size() / 2; ++i){
domain_bounds[2 * i] = -10;
domain_bounds[2 * i + 1] = 10;
}
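// With this interleaved layout, the i-th optimized parameter is constrained to the interval
// [domain_bounds[2 * i], domain_bounds[2 * i + 1]], i.e. every weight and bias is searched for in [-10, 10].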
// Parameters
// 1) domain_bounds Bounds for every optimized parameter (p1_lower, p1_upper, p2_lower, p2_upper...)
// 2) c1 Cognitive parameter
// 3) c2 Social parameter
// 4) w Inertia weight
// 5) gamma Threshold value for particle velocity - all particles must possess the same or slower velocity for the algorithm to end
// 6) epsilon Radius of the cluster area (Euclidean distance)
// 7) delta Fraction of particles (0-1) which have to be inside the cluster for the algorithm to stop
// 8) n_particles Number of particles in the swarm
// 9) iter_max Maximal number of iterations - optimization will stop after that, even if not converged
// l4n::ParticleSwarm ps(&domain_bounds,
// 1.711897,
// 1.711897,
// 0.711897,
// 0.5,
// 20,
// 0.7,
// 600,
// 1000);
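// A minimal sketch (kept commented out) of how the swarm optimizer could be used in place of the
// gradient descent below, assuming l4n::ParticleSwarm exposes the same optimize() interface as
// l4n::GradientDescent and can be handed to the cross-validator in the same way; cv_ps is hypothetical.
// ps.optimize(mse);                     // Network training with Particle Swarm
// l4n::CrossValidator cv_ps(&ps, &mse); // Hypothetical alternative cross-validator setup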
// Parameters
// 1) Threshold for the successful ending of the optimization - deviation from minima
// 2) Number of iterations to reset step size to tolerance/10.0
// 3) Maximal number of iterations - optimization will stop after that, even if not converged
l4n::GradientDescent gs(1e-3, 100, 200);
// Weight and bias randomization in the network according to the uniform distribution
// Equivalent to calling nn.randomize_weights() and nn.randomize_biases()
nn.randomize_parameters();
// gs.optimize(mse); // Network training
// std::vector<double> i(ds.get_input_dim());
// std::vector<double> o(ds.get_output_dim());
// nn.eval_single(i, o); // Evaluate network for one input and save the result into the output vector
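// A hedged alternative (kept commented out): if the network is trained directly by uncommenting
// gs.optimize(mse) above instead of via cross-validation, its error on the training data can be
// written to the output file with the same call that is used for ds2 further below.
// mse.eval_on_data_set(&ds, &output_file);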
/* Cross-validation */
l4n::CrossValidator cv(&gs, &mse);
// Parameters:
// 1) Number of data-set parts used for CV
// 2) Number of tests performed
// 3) Output file for the data from the cross-validation (one CV run - one file)
cv.run_k_fold_test(10, 3, &output_file);
nn.save_text("test_net.4n");
/* Check of the saved network - print to STDOUT */
std::cout << std::endl << "The original network info:" << std::endl;
nn.write_stats();
nn.write_weights();
nn.write_biases();
l4n::NeuralNetwork nn_loaded("test_net.4n");
std::cout << std::endl << "The loaded network info:" << std::endl;
nn_loaded.write_stats();
nn_loaded.write_weights();
nn_loaded.write_biases();
/* Check of the saved network - write to the file */
output_file << std::endl << "The original network info:" << std::endl;
nn.write_stats(&output_file);
nn.write_weights(&output_file);
nn.write_biases(&output_file);
output_file << std::endl << "The loaded network info:" << std::endl;
nn_loaded.write_stats(&output_file);
nn_loaded.write_weights(&output_file);
nn_loaded.write_biases(&output_file);
/* Example of evaluation of a single input, normalized input, de-normalized output */
std::vector<double> input_norm(ds.get_input_dim()),
input(ds.get_input_dim()),
output_norm(ds.get_output_dim()),
expected_output_norm(ds.get_output_dim()),
output(ds.get_output_dim()),
expected_output(ds.get_output_dim());
size_t data_idx = 0;
ds.get_input(input_norm, data_idx);
ds.get_output(expected_output_norm, data_idx);
nn_loaded.eval_single(input_norm, output_norm);
ds.de_normalize_single(output_norm, output);
ds.de_normalize_single(input_norm, input);
ds.de_normalize_single(expected_output_norm, expected_output);
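// Print the de-normalized prediction next to the expected value; this uses only the vectors filled
// above and standard iostream, and assumes the single input/output column used in this example.
std::cout << std::endl << "Input: " << input.at(0)
          << ", predicted output: " << output.at(0)
          << ", expected output: " << expected_output.at(0) << std::endl;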
/* Evaluate network on an arbitrary data-set and save results into the file */
l4n::DataSet ds2;
std::vector<double> inp, out;
for(double i = 0; i < 5; i++) {
inp = {i};
out = {i+2};
ds2.add_data_pair(inp, out);
}
output_file << std::endl << "Evaluating network on the dataset: " << std::endl;
ds2.store_data_text(&output_file);
output_file << "Output and the error:" << std::endl;
mse.eval_on_data_set(&ds2, &output_file);
/* Close the output file for writing */
output_file.close();
} catch(const std::exception& e) {
std::cerr << e.what() << std::endl;
exit(EXIT_FAILURE);
}

return 0;
}