//
// Created by martin on 25.11.18.
//
#include <iostream>
#include <cstdio>
#include <fstream>
#include <vector>
#include <utility>
#include <algorithm>
#include <cassert>
#include "4neuro.h"
int main() {
try {
/* PHASE 1 - TRAINING DATA LOADING */
l4n::CSVReader reader1("/tmp/lib4neuro_pokus/simulator_input.txt", "\t", true); // File, separator, skip 1st line
reader1.read(); // Read from the file
/* PHASE 2 - NEURAL NETWORK ASSEMBLY AND PARTICLE SWARM OPTIMIZATION */
/* Create a data set for the first training of the neural network */
/* Specify which columns are inputs or outputs */
std::vector<unsigned int> inputs1 = { 0 }; // Possible multiple inputs, e.g. {0,3}, column indices starting from 0
std::vector<unsigned int> outputs1 = { 1 }; // Possible multiple outputs, e.g. {1,2}
l4n::DataSet ds1 = reader1.get_data_set(&inputs1, &outputs1); // Creation of data-set for NN
ds1.normalize(); // Normalization of data to prevent numerical problems
/* Numbers of neurons in layers (including input and output layers) */
std::vector<unsigned int> neuron_numbers_in_layers = { 1, 3, 1 };
/* Fully connected feed-forward network with linear activation functions for the input and output */
/* layers and the specified activation functions for the hidden ones (one entry per hidden layer) */
std::vector<l4n::NEURON_TYPE> hidden_type_v = { l4n::NEURON_TYPE::LOGISTIC }; // e.g. {l4n::NEURON_TYPE::LOGISTIC, l4n::NEURON_TYPE::LINEAR} for two hidden layers
l4n::FullyConnectedFFN nn1(&neuron_numbers_in_layers, &hidden_type_v);
/* Error function */
l4n::MSE mse1(&nn1, &ds1); // First parameter - neural network, second parameter - data-set
/* Particle Swarm method domain */
std::vector<double> domain_bounds(2 * (nn1.get_n_weights() + nn1.get_n_biases()));
for (size_t i = 0; i < domain_bounds.size() / 2; ++i) {
domain_bounds[2 * i] = -10;
domain_bounds[2 * i + 1] = 10;
}
// 1) domain_bounds Bounds for every optimized parameter (p1_lower, p1_upper, p2_lower, p2_upper...)
// 2) c1 Cognitive parameter
// 3) c2 Social parameter
// 4) w Inertia weight
// 5) gamma Threshold value for particle velocity - all particles must possess the same or slower velocity for the algorithm to end
// 6) epsilon Radius of the cluster area (Euclidean distance)
// 7) delta Fraction of particles (0-1) that must be inside the cluster for the algorithm to stop
// 8) n_particles Number of particles in the swarm
// 9) iter_max Maximum number of iterations - optimization will stop after that, even if not converged
l4n::ParticleSwarm ps(&domain_bounds,
                      1.711897,  // c1 - cognitive parameter
                      1.711897,  // c2 - social parameter
                      0.711897,  // w - inertia weight
                      0.5,       // gamma - velocity threshold
                      0.3,       // epsilon - cluster radius
                      0.7,       // delta - fraction of particles required in the cluster
                      150,       // n_particles - number of particles in the swarm
                      1500);     // iter_max - maximum number of iterations
/* Weight and bias randomization in the network according to a uniform distribution */
nn1.randomize_parameters();
/* Particle Swarm optimization */
ps.optimize(mse1);
/* Save Neural network parameters to file */
nn1.save_text("test_net_Particle_Swarm.4n");
/* PHASE 3 - LOADING NN FROM FILE AND TRAINING NO 2 - GRADIENT DESCENT */
l4n::NeuralNetwork nn2("test_net_Particle_Swarm.4n");
/* Data loading for the second training (gradient descent) */
l4n::CSVReader reader2("/tmp/lib4neuro_pokus/simulator_input.txt", "\t", true); // File, separator, skip 1st line
reader2.read(); // Read from the file
/* Create a data set for the second training of the neural network */
/* Specify which columns are inputs or outputs */
std::vector<unsigned int> inputs2 = { 0 }; // Possible multiple inputs, e.g. {0,3}, column indices starting from 0
std::vector<unsigned int> outputs2 = { 1 }; // Possible multiple outputs, e.g. {1,2}
l4n::DataSet ds2 = reader2.get_data_set(&inputs2, &outputs2); // Creation of data-set for NN
ds2.normalize(); // Normalization of data to prevent numerical problems
/* Error function */
l4n::MSE mse2(&nn2, &ds2); // First parameter - neural network, second parameter - data-set
// Parameters of the gradient descent
// 1) Threshold for the successful ending of the optimization - allowed deviation from the minimum
// 2) Number of iterations after which the step size is reset to tolerance/10.0
// 3) Maximum number of iterations - optimization will stop after that, even if not converged
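// NOTE: The 'gs' optimizer used below must be constructed here; the argument values are
//       illustrative only and correspond to the three parameters described above
//       (tolerance, restart interval, maximum number of iterations).
l4n::GradientDescent gs(1e-6, 1000, 100000);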
/* Gradient Descent Optimization */
gs.optimize(mse2); // Network training
/* Save Neural network parameters to file */
nn2.save_text("test_net_Gradient_Descent.4n");
/* Output file specification */
std::string filename = "simulator_output.txt";
std::ofstream output_file(filename);
if (!output_file.is_open()) {
throw std::runtime_error("File '" + filename + "' can't be opened!");
}
/* Neural network loading */
l4n::NeuralNetwork nn3("test_net_Gradient_Descent.4n");
/* Check of the saved network - write to the file */
output_file << std::endl << "The loaded network info:" << std::endl;
nn3.write_stats(&output_file);
nn3.write_weights(&output_file);
nn3.write_biases(&output_file);
/* Evaluate network on an arbitrary data-set and save results into the file */
l4n::CSVReader reader3("/tmp/lib4neuro_pokus/simulator_input.txt", "\t", true); // File, separator, skip 1st line
reader3.read(); // Read from the file
/* Create a data set for testing the neural network */
/* Specify which columns are inputs or outputs */
std::vector<unsigned int> inputs3 = { 0 }; // Possible multiple inputs, e.g. {0,3}, column indices starting from 0
std::vector<unsigned int> outputs3 = { 1 }; // Possible multiple outputs, e.g. {1,2}
l4n::DataSet ds3 = reader3.get_data_set(&inputs3, &outputs3); // Creation of data-set for NN
ds3.normalize(); // Normalization of data to prevent numerical problems
output_file << std::endl << "Evaluating network on the dataset: " << std::endl;
output_file << "Output and the error:" << std::endl;
/* Error function */
l4n::MSE mse3(&nn3, &ds3); // First parameter - neural network, second parameter - data-set
std::cout << "Eval on normalized data:" << std::endl;
mse3.eval_on_data_set(&ds3, &output_file);
std::cout << "Eval on de-normalized data:" << std::endl;
ds3.de_normalize();
mse3.eval_on_data_set(&ds3, &output_file);
/* Close the output file for writing */
output_file.close();
} catch (const std::exception& e) {
std::cerr << e.what() << std::endl;
exit(EXIT_FAILURE);
}

return 0;
}