/**
 * simulator_1_2.cpp
 *
 * Example of training a neural-network-based simulator on CSV process data with a
 * learning sequence (random initialization, optional particle swarm, Levenberg-Marquardt),
 * saving the trained network, reloading it and evaluating it on a testing data set.
 *
 * @author Michal Kravčenko
 * @date 15.3.19
 */
#include <iostream>
#include <cstdio>
#include <fstream>
#include <vector>
#include <utility>
#include <algorithm>
#include <cassert>
#include <cstdlib>
#include "4neuro.h"
#include "../LearningMethods/RandomSolution.h"
int main(int argc, char** argv) {
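    /* Tunable parameters of the example; note that prec, restart_interval and
       max_n_iters_gradient are defined for experimentation but are not used below. */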
    // CAUTION: normalization is necessary, and keep in mind that it affects the time shifts.
    bool normalize_data = true;
    double prec = 1e-9;
    double prec_lm = 1e-9;
    int restart_interval = 500;
    int max_n_iters_gradient = 10000;
    int max_n_iters_gradient_lm = 1;
    int max_n_iters_swarm = 5;
    int n_particles_swarm = 200;
    unsigned long batch_size = 0;
    int max_number_of_cycles = 1;
    try {
        puts("*********************** 1");

        /* PHASE 1 - TRAINING DATA LOADING, SIMPLE SIMULATION */
        l4n::CSVReader reader1("/home/martin/Desktop/ANN_MV_process_Data.csv", ";", true); // File, separator, skip 1st line
        reader1.read(); // Read from the file

        puts("*********************** 2");

        /* PHASE 2 - NEURAL NETWORK SPECIFICATION */
        /* Create a data set for the first training of the neural network */
        /* Specify which columns are inputs and which are outputs */
        std::vector<unsigned int> inputs = { 0 }; // Multiple inputs are possible, e.g. {0, 3}; column indices start at 0
        std::vector<unsigned int> outputs = { 2 }; // Multiple outputs are possible, e.g. {1, 2}
        std::shared_ptr<l4n::DataSet> ds1 = reader1.get_data_set(&inputs, &outputs); // Create the data set for the NN
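        /* DoubleUnitStrategy presumably rescales each column into [-1, 1]; the data set
           presumably takes ownership of the raw strategy pointer. */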
        ds1->set_normalization_strategy(new DoubleUnitStrategy());
        if (normalize_data) {
            ds1->normalize(); // Normalization of the data to prevent numerical problems
        }
puts("*********************** 3");
/* Numbers of neurons in layers (including input and output layers) */
std::vector<size_t> neuron_numbers_in_layers = { 1, 4, 4, 1 };
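        // i.e. one input neuron, two hidden layers with four neurons each, and one output neuron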
        /* For each valve (1 in this example) set up the times of change */
        std::vector<std::vector<double>> t;
        t.push_back({ds1->get_normalized_value(0)});

        /* For each valve (1 in this example) set up the magnitudes of change */
        std::vector<std::vector<double>> xi;
        // xi.push_back({ds1->get_data()->at(0).second});
        xi.push_back({1.0});

        /* The Simulator object */
        l4n::Simulator sim(outputs.size(), neuron_numbers_in_layers, t, xi);
puts("*********************** 4");
/* Error function */
l4n::MSE mse1(&sim, ds1.get()); // First parameter - neural network, second parameter - data-set
/* Particle Swarm method domain*/
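        /* The bounds are stored pairwise as {lower_0, upper_0, lower_1, upper_1, ...},
           restricting every weight and bias of the network to [-0.1, 0.1]. */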
        std::vector<double> domain_bounds(2 * (sim.get_n_weights() + sim.get_n_biases()));
        for (size_t i = 0; i < domain_bounds.size() / 2; ++i) {
            domain_bounds[2 * i] = -0.1;
            domain_bounds[2 * i + 1] = 0.1;
        }
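
        /* The learning sequence chains the optimizers added below; its arguments are,
           presumably, the target error tolerance and the maximum number of passes
           through the whole sequence. */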
        l4n::LearningSequence learning_sequence(1e-6, max_number_of_cycles);
        auto new_learning_method = std::make_shared<l4n::RandomSolution>();
        learning_sequence.add_learning_method(new_learning_method);
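
        /* Particle Swarm optimizer; after the domain bounds, the constructor arguments are,
           presumably, the PSO coefficients (c1, c2 and the inertia weight w), three
           convergence-control parameters, the swarm size and the iteration limit. */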
        auto new_learning_method2 = std::make_shared<l4n::ParticleSwarm>(&domain_bounds,
                                                                         1.711897,
                                                                         1.711897,
                                                                         0.711897,
                                                                         0.5,
                                                                         0.3,
                                                                         0.7,
                                                                         n_particles_swarm,
                                                                         max_n_iters_swarm);
        // learning_sequence.add_learning_method( new_learning_method2 );

        // std::shared_ptr<l4n::LearningMethod> new_learning_method3 = std::make_shared<l4n::LevenbergMarquardt>(max_n_iters_gradient_lm, batch_size, prec_lm);
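        /* Levenberg-Marquardt fine-tuning; a batch_size of 0 presumably means the whole
           data set is used in every iteration. */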
        auto new_learning_method3 = std::make_shared<l4n::LevenbergMarquardt>(max_n_iters_gradient_lm, batch_size, prec_lm);
        learning_sequence.add_learning_method(new_learning_method3);
puts("*********************** 5");
/* Complex Optimization */
learning_sequence.optimize(mse1); // Network training
puts("*********************** 6");
/* Save Neural network parameters to file */
sim.save_text("test_net_Gradient_Descent.4n");
/* PHASE 4 - TESTING DATA */
/* Output file specification */
std::string filename = "simulator_output.txt";
std::ofstream output_file(filename);
if (!output_file.is_open()) {
throw std::runtime_error("File '" + filename + "' can't be opened!");
}
/* Neural network loading */
        l4n::NeuralNetwork nn3("test_net_Gradient_Descent.4n");

        /* Check the saved network - write its info to the file */
        output_file << std::endl << "The loaded network info:" << std::endl;
        nn3.write_stats(&output_file);
        nn3.write_weights(&output_file);
        nn3.write_biases(&output_file);

        /* Evaluate the network on an arbitrary data set and save the results into the file */
        l4n::CSVReader reader3("../../../data_files/data_BACK_RH_1.csv", ";", true); // File, separator, skip 1st line
        reader3.read(); // Read from the file

        /* Create a data set for the testing of the neural network */
        /* Specify which columns are inputs and which are outputs */
        std::shared_ptr<l4n::DataSet> ds3 = reader3.get_data_set(&inputs, &outputs); // Create the data set for the NN
        ds3->set_normalization_strategy(new DoubleUnitStrategy());
        if (normalize_data) {
            ds3->normalize(); // Normalization of the data to prevent numerical problems
        }
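        /* Note that ds3 is normalized with its own freshly constructed strategy, i.e.
           independently of the scaling fitted on the training data. */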
        output_file << std::endl << "Evaluating network on the dataset: " << std::endl;
        ds3->store_data_text(&output_file);
        output_file << "Output and the error:" << std::endl;

        /* Error function */
        l4n::MSE mse3(&nn3, ds3.get()); // First parameter - the neural network, second parameter - the data set
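        /* Evaluate the network over the whole test set and write the outputs and errors
           to the file; the trailing arguments presumably select an optional results
           buffer, denormalization of the outputs, and verbose per-sample reporting. */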
        mse3.eval_on_data_set(ds3.get(), &output_file, nullptr, normalize_data, true);

        /* Close the output file */
        output_file.close();

        return 0;
    }
    catch (const std::exception& e) {
        std::cerr << e.what() << std::endl;
        return EXIT_FAILURE;
    }
}