Commit 84e9c88b authored by Michal Kravcenko

ADD: added a new example to test the implementation of the new Simulator class

parent 0502c3a8
@@ -10,6 +10,7 @@
#include "../src/DataSet/DataSet.h"
#include "../src/Network/NeuralNetwork.h"
#include "../src/Network/NeuralNetworkSum.h"
#include "../src/Simulator/Simulator.h"
#include "../src/Neuron/Neuron.h"
#include "../src/Neuron/NeuronConstant.h"
#include "../src/Neuron/NeuronBinary.h"
@@ -57,7 +57,9 @@ if ("${BUILD_LIB}" STREQUAL "yes")
LearningMethods/GradientDescentSingleItem.cpp
LearningMethods/LearningSequence.cpp
LearningMethods/RandomSolution.cpp
-)
+Simulator/Simulator.cpp
+NetConnection/ConnectionFunctionConstant.cpp
+Neuron/NeuronBiased.cpp Neuron/NeuronBiased.h)
# FileSystem C++ library - has to be linked manually in GCC-8
set(CXX_FILESYSTEM_LIB "")
@@ -32,6 +32,9 @@ target_link_libraries(test_harmonic_oscilator PUBLIC lib4neuro)
add_executable(simulator simulator.cpp)
target_link_libraries(simulator PUBLIC lib4neuro)
+add_executable(simulator2 simulator2.cpp)
+target_link_libraries(simulator2 PUBLIC lib4neuro)
add_executable(x2_fitting x2_fitting.cpp)
target_link_libraries(x2_fitting PUBLIC lib4neuro)
@@ -45,7 +48,8 @@ set_target_properties(
network_serialization
test_harmonic_oscilator
seminar
-simulator
+simulator
+simulator2
x2_fitting
PROPERTIES
@@ -116,6 +120,12 @@ target_include_directories(
${ROOT_DIR}/include
)
+target_include_directories(
+simulator2
+PRIVATE
+${ROOT_DIR}/include
+)
target_include_directories(
x2_fitting
PRIVATE
/**
* Example testing the implementation of the new Simulator class.
*
* @author Michal Kravčenko
* @date 15.3.19 -
*/
#include <iostream>
#include <cstdio>
#include <fstream>
#include <vector>
#include <utility>
#include <algorithm>
#include <assert.h>
#include "4neuro.h"
#include "../LearningMethods/RandomSolution.h"
int main(int argc, char** argv) {
// TODO: do not use normalization because of the time shifts
bool normalize_data = false;
double prec = 1e-9;
double prec_lm = 1e-9;
int restart_interval = 500;
int max_n_iters_gradient = 10000;
int max_n_iters_gradient_lm = 10000;
int max_n_iters_swarm = 20;
int n_particles_swarm = 200;
unsigned long batch_size = 0;
int max_number_of_cycles = 1;
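/* Only prec_lm, max_n_iters_gradient_lm, batch_size and max_number_of_cycles are used below;
   the remaining gradient-descent and particle-swarm settings are left unused in this example. */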
try {
/* PHASE 1 - TRAINING DATA LOADING, SIMPLE SIMULATION */
l4n::CSVReader reader1("../../../data_files/data_BACK_RH_1.csv", ";", true); // File, separator, skip 1st line
reader1.read(); // Read from the file
/* PHASE 2 - NEURAL NETWORK SPECIFICATION */
/* Create a data set for the first training of the neural network */
/* Specify which columns are inputs or outputs */
std::vector<unsigned int> inputs = { 0 }; // Possible multiple inputs, e.g. {0,3}, column indices starting from 0
std::vector<unsigned int> outputs = { 2 }; // Possible multiple outputs, e.g. {1,2}
l4n::DataSet ds1 = reader1.get_data_set(&inputs, &outputs); // Creation of data-set for NN
if(normalize_data){
ds1.normalize(); // Normalization of data to prevent numerical problems
}
/* Numbers of neurons in layers (including input and output layers) */
std::vector<size_t> neuron_numbers_in_layers = { 1, 3, 3, 1 };
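/* i.e. 1 input neuron, two hidden layers of 3 neurons each, and 1 output neuron */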
/* For each valve (1 in this example), set up the times of change */
std::vector<std::vector<double>> t;
t.push_back({0});
/* For each valve (1 in this example), set up the magnitudes of change */
std::vector<std::vector<double>> xi;
xi.push_back({ds1.get_data()->at(0).second});
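/* the initial magnitude is taken from the first training sample (presumably its output part, given the pair layout) */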
/* The Simulator object */
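/* Constructor arguments as used here: number of network outputs, neuron counts per layer,
   valve switching times t, and valve change magnitudes xi */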
l4n::Simulator sim(outputs.size(), neuron_numbers_in_layers, t, xi);
/* Error function */
l4n::MSE mse1(&sim, &ds1); // First parameter - the simulator network, second parameter - data-set
/* Particle Swarm method domain */
std::vector<double> domain_bounds(2 * (sim.get_n_weights() + sim.get_n_biases()));
for (size_t i = 0; i < domain_bounds.size() / 2; ++i) {
domain_bounds[2 * i] = -0.1;
domain_bounds[2 * i + 1] = 0.1;
}
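/* Note: these bounds would parametrize a particle swarm stage; no such stage is added to the
   learning sequence below, so they are left unused in this example. */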
l4n::RandomSolution rnd;
l4n::LevenbergMarquardt leven(max_n_iters_gradient_lm, batch_size, prec_lm);
l4n::LearningSequence learning_sequence(1e-6, max_number_of_cycles);
learning_sequence.add_learning_method( &rnd );
learning_sequence.add_learning_method( &leven );
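/* The sequence applies the random re-initialization followed by Levenberg-Marquardt for up to
   max_number_of_cycles cycles (the 1e-6 argument above is presumably an error tolerance). */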
/* Weight and bias randomization in the network according to a uniform distribution */
sim.randomize_parameters();
/* PHASE 3 - TRAINING: COMPLEX OPTIMIZATION */
learning_sequence.optimize(mse1); // Network training
/* Save Neural network parameters to file */
sim.save_text("test_net_Gradient_Descent.4n");
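/* The saved file is reloaded below as a plain NeuralNetwork to check the serialized parameters. */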
/* PHASE 4 - TESTING DATA */
/* Output file specification */
std::string filename = "simulator_output.txt";
std::ofstream output_file(filename);
if (!output_file.is_open()) {
throw std::runtime_error("File '" + filename + "' can't be opened!");
}
/* Neural network loading */
l4n::NeuralNetwork nn3("test_net_Gradient_Descent.4n");
/* Check of the saved network - write to the file */
output_file << std::endl << "The loaded network info:" << std::endl;
nn3.write_stats(&output_file);
nn3.write_weights(&output_file);
nn3.write_biases(&output_file);
/* Evaluate network on an arbitrary data-set and save results into the file */
l4n::CSVReader reader3("../../../data_files/data_BACK_RH_1.csv", ";", true); // File, separator, skip 1st line
reader3.read(); // Read from the file
/* Create a data set for the testing of the neural network */
/* Specify which columns are inputs or outputs */
l4n::DataSet ds3 = reader3.get_data_set(&inputs, &outputs); // Creation of data-set for NN
if(normalize_data){
ds3.normalize(); // Normalization of data to prevent numerical problems
}
// output_file << std::endl << "Evaluating network on the dataset: " << std::endl;
// ds3.store_data_text(&output_file);
output_file << "Output and the error:" << std::endl;
/* Error function */
l4n::MSE mse3(&nn3, &ds3); // First parameter - neural network, second parameter - data-set
mse3.eval_on_data_set(&ds3, &output_file, nullptr, normalize_data, true);
/* Close the output file for writing */
output_file.close();
return 0;
}
catch (const std::exception& e) {
std::cerr << e.what() << std::endl;
exit(EXIT_FAILURE);
}
}