Commit c8bf3ed6 authored by Michal Kravcenko's avatar Michal Kravcenko

FIX: deleted unnecessary files

parent dd87d505
This diff is collapsed.
/**
* DESCRIPTION OF THE FILE
*
* @author Michal Kravčenko
* @date 11.3.19 -
*/
#ifndef LIB4NEURO_SIMULATOR_H
#define LIB4NEURO_SIMULATOR_H
#include <iostream>
#include "4neuro.h"
namespace lib4neuro {
/**
 * Neural-network-based simulator of a valve system.
 *
 * Derives from NeuralNetwork; each valve corresponds to one network output
 * (see the example usage where n_outputs == outputs.size()).
 * NOTE(review): the destructor is declared non-virtual here — safe only if
 * NeuralNetwork declares a virtual destructor; verify in the base class.
 */
class Simulator : public NeuralNetwork {
private:
/* Number of valves handled by this simulator (returned by get_n_valves()). */
size_t n_valves;
/* Per-valve first index into the output-connection arrays (inclusive).
 * NOTE(review): exact indexing semantics are defined in the .cpp — confirm there. */
std::vector<size_t> index_start_output_connections_inclusive;
/* Per-valve last index into the output-connection arrays (inclusive). */
std::vector<size_t> index_end_output_connections_inclusive;
public:
/**
 * Builds the simulator network.
 * @param n_outputs number of outputs/valves of the simulated system
 * @param hidden_net_structure numbers of neurons per layer of the internal
 *        network (see the example: {1, 5, 5, 1}) — TODO confirm whether
 *        input/output layers are included
 * @param t for each valve, the times at which its state changes
 * @param xi for each valve, the magnitudes of those changes
 */
LIB4NEURO_API explicit Simulator(
size_t n_outputs,
std::vector<size_t>& hidden_net_structure,
std::vector<std::vector<double>>& t,
std::vector<std::vector<double>>& xi
);
~Simulator();
/** @return the number of valves (n_valves). */
LIB4NEURO_API size_t get_n_valves();
/**
 * Evaluates the model at time t for all valves.
 * @param result output vector receiving the evaluated values
 */
LIB4NEURO_API void eval_model(double t,
std::vector<double>& result);
/**
 * Evaluates the model at time t for a single valve.
 * @param valve_idx index of the valve to evaluate
 * @param result output vector receiving the evaluated value(s)
 */
LIB4NEURO_API void eval_model(double t,
size_t valve_idx,
std::vector<double>& result);
//TODO not yet implemented/finished in the original source
LIB4NEURO_API void eval_model(double t,
size_t valve_idx);
};
};//end of namespace lib4neuro
#endif //LIB4NEURO_SIMULATOR_H
#include <algorithm>
#include <assert.h>
#include <cstdio>
#include <fstream>
#include <iostream>
#include <memory>
#include <utility>
#include <vector>

#include "4neuro.h"
#include "../LearningMethods/RandomSolution.h"
int main(int argc,
char** argv) {
bool normalize_data = true;
double prec = 1e-9;
double prec_lm = 1e-9;
int restart_interval = 500;
int max_n_iters_gradient = 10000;
int max_n_iters_gradient_lm = 10000;
int max_n_iters_swarm = 20;
int n_particles_swarm = 200;
unsigned long batch_size = 0;
int max_number_of_cycles = 1;
try {
/* PHASE 1 - TRAINING DATA LOADING, NETWORK ASSEMBLY AND PARTICLE SWARM OPTIMIZATION */
l4n::CSVReader reader1("/home/fluffymoo/Dropbox/data_BACK_RH_1.csv",
";",
true); // File, separator, skip 1st line
reader1.read(); // Read from the file
/* PHASE 1 - NEURAL NETWORK SPECIFICATION */
/* Create data set for both the first training of the neural network */
/* Specify which columns are inputs or outputs */
std::vector<unsigned int> inputs = {0}; // Possible multiple inputs, e.g. {0,3}, column indices starting from 0
std::vector<unsigned int> outputs = {2}; // Possible multiple outputs, e.g. {1,2}
std::shared_ptr<l4n::DataSet> ds1 = reader1.get_data_set(&inputs,
&outputs); // Creation of data-set for NN
if (normalize_data) {
ds1.operator->()->set_normalization_strategy(new DoubleUnitStrategy());
ds1.get()->normalize(); // Normalization of data to prevent numerical problems
}
/* Numbers of neurons in layers (including input and output layers) */
std::vector<unsigned int> neuron_numbers_in_layers = {1, 3, 1};
/* Fully connected feed-forward network with linear activation functions for input and output */
/* layers and the specified activation fns for the hidden ones (each entry = layer)*/
std::vector<l4n::NEURON_TYPE> hidden_type_v = {l4n::NEURON_TYPE::LOGISTIC, l4n::NEURON_TYPE::LOGISTIC,
l4n::NEURON_TYPE::LOGISTIC, l4n::NEURON_TYPE::LOGISTIC,
l4n::NEURON_TYPE::LOGISTIC}; // hidden_type_v = {l4n::NEURON_TYPE::LOGISTIC, l4n::NEURON_TYPE::LINEAR}
l4n::FullyConnectedFFN nn1(&neuron_numbers_in_layers,
&hidden_type_v);
/* Error function */
l4n::MSE mse1(&nn1,
ds1.get()); // First parameter - neural network, second parameter - data-set
/* Particle Swarm method domain*/
std::vector<double> domain_bounds(2 * (nn1.get_n_weights() + nn1.get_n_biases()));
for (size_t i = 0; i < domain_bounds.size() / 2; ++i) {
domain_bounds[2 * i] = -0.1;
domain_bounds[2 * i + 1] = 0.1;
}
// Parameters of the Particle Swarm
// 1) domain_bounds Bounds for every optimized parameter (p1_lower, p1_upper, p2_lower, p2_upper...)
// 2) c1 Cognitive parameter
// 3) c2 Social parameter
// 4) w Inertia weight
// 5) gamma Threshold value for particle velocity - all particles must posses the same or slower velocity for the algorithm to end
// 6) epsilon Radius of the cluster area (Euclidean distance)
// 7) delta Amount of particles, which has to be in the cluster for the algorithm to stop (0-1)
// 8) n_particles Number of particles in the swarm
// 9) iter_max Maximal number of iterations - optimization will stop after that, even if not converged
l4n::ParticleSwarm ps(&domain_bounds,
1.711897,
1.711897,
0.711897,
0.5,
0.3,
0.7,
n_particles_swarm,
max_n_iters_swarm);
// Parameters of the gradient descent
// 1) Threshold for the successful ending of the optimization - deviation from minima
// 2) Number of iterations to reset step size to tolerance/10.0
// 3) Maximal number of iterations - optimization will stop after that, even if not converged
l4n::RandomSolution rnd;
l4n::GradientDescent gs_(prec,
restart_interval,
max_n_iters_gradient,
batch_size);
l4n::GradientDescentBB gs(prec,
restart_interval,
max_n_iters_gradient,
batch_size);
l4n::GradientDescentSingleItem gs_si(prec,
0,
5000);//TODO needs improvement
l4n::LevenbergMarquardt leven(max_n_iters_gradient_lm,
batch_size,
prec_lm);
l4n::LearningSequence learning_sequence(1e-6,
max_number_of_cycles);
std::shared_ptr<l4n::LearningMethod> new_learning_method;
new_learning_method.reset(&rnd);
learning_sequence.add_learning_method(new_learning_method);
std::shared_ptr<l4n::LearningMethod> new_learning_method2;
new_learning_method2.reset(&leven);
learning_sequence.add_learning_method(new_learning_method2);
/* Weight and bias randomization in the network accordingly to the uniform distribution */
nn1.randomize_parameters();
/* Complex Optimization */
learning_sequence.optimize(mse1); // Network training
/* Save Neural network parameters to file */
nn1.save_text("test_net_Gradient_Descent.4n");
/* PHASE 4 - TESTING DATA */
std::string filename = "simulator_output.txt";
std::ofstream output_file(filename);
if (!output_file.is_open()) {
throw std::runtime_error("File '" + filename + "' can't be opened!");
}
l4n::NeuralNetwork nn3("test_net_Gradient_Descent.4n");
/* Check of the saved network - write to the file */
output_file << std::endl << "The loaded network info:" << std::endl;
nn3.write_stats(&output_file);
nn3.write_weights(&output_file);
nn3.write_biases(&output_file);
l4n::CSVReader reader3("/home/fluffymoo/Dropbox/data_BACK_RH_1.csv",
";",
true); // File, separator, skip 1st line
reader3.read(); // Read from the file
std::shared_ptr<l4n::DataSet> ds3 = reader3.get_data_set(&inputs,
&outputs); // Creation of data-set for NN
if (normalize_data) {
ds3.operator->()->set_normalization_strategy(new DoubleUnitStrategy());
ds3.get()->normalize(); // Normalization of data to prevent numerical problems
}
output_file << "Output and the error:" << std::endl;
l4n::MSE mse3(&nn3,
ds3.get()); // First parameter - neural network, second parameter - data-set
mse3.eval_on_data_set(ds3.get(),
&output_file,
nullptr,
normalize_data,
true);
/* Close the output file for writing */
output_file.close();
return 0;
}
catch (const std::exception& e) {
std::cerr << e.what() << std::endl;
exit(EXIT_FAILURE);
}
}
/**
* DESCRIPTION OF THE FILE
*
* @author Michal Kravčenko
* @date 15.3.19 -
*/
#include <iostream>
#include <cstdio>
#include <fstream>
#include <vector>
#include <utility>
#include <algorithm>
#include <assert.h>
#include "4neuro.h"
#include "../LearningMethods/RandomSolution.h"
int main(int argc,
char** argv) {
//POZOR, NORMALIZACE JE NUTNA A JE TREBA MIT NA PAMETI, ZE MA VLIV NA CASOVE POSUNY
bool normalize_data = true;
double prec = 1e-9;
double prec_lm = 1e-9;
int restart_interval = 500;
int max_n_iters_gradient = 10000;
int max_n_iters_gradient_lm = 100000;
int max_n_iters_swarm = 5;
int n_particles_swarm = 200;
unsigned long batch_size = 0;
int max_number_of_cycles = 1;
try {
/* PHASE 2 - TRAINING DATA LOADING, SIMPLE SIMULATION */
l4n::CSVReader reader1("/home/fluffymoo/Dropbox/data_BACK_RH_1.csv",
";",
true); // File, separator, skip 1st line
reader1.read(); // Read from the file
/* PHASE 2 - NEURAL NETWORK SPECIFICATION */
/* Create data set for both the first training of the neural network */
/* Specify which columns are inputs or outputs */
std::vector<unsigned int> inputs = {0}; // Possible multiple inputs, e.g. {0,3}, column indices starting from 0
std::vector<unsigned int> outputs = {2}; // Possible multiple outputs, e.g. {1,2}
std::shared_ptr<l4n::DataSet> ds1 = reader1.get_data_set(&inputs,
&outputs); // Creation of data-set for NN
if (normalize_data) {
ds1.operator->()->set_normalization_strategy(new DoubleUnitStrategy());
ds1->normalize(); // Normalization of data to prevent numerical problems
}
/* Numbers of neurons in layers (including input and output layers) */
std::vector<size_t> neuron_numbers_in_layers = {1, 5, 5, 1};
/* V CASE 0 NASTAVIME TEPLOTU NA 1135*/
/* for each valve (1 in this example) setup the times of change */
std::vector<std::vector<double>> t;
t.push_back({ds1->get_normalized_value(0)});//DULEZITY RADEK
/* for each valve (1 in this example) setup the magnitudes of change */
std::vector<std::vector<double>> xi;
xi.push_back({ds1->get_normalized_value(1135)});//DULEZITY RADEK
/* The simulator2 object */
l4n::Simulator sim(outputs.size(),
neuron_numbers_in_layers,
t,
xi);
/* Error function */
l4n::MSE mse1(&sim,
ds1.get()); // First parameter - neural network, second parameter - data-set
/* Particle Swarm method domain*/
std::vector<double> domain_bounds(2 * (sim.get_n_weights() + sim.get_n_biases()));
for (size_t i = 0; i < domain_bounds.size() / 2; ++i) {
domain_bounds[2 * i] = -0.1;
domain_bounds[2 * i + 1] = 0.1;
}
l4n::LearningSequence learning_sequence(1e-6,
max_number_of_cycles);
auto new_learning_method = std::make_shared<l4n::RandomSolution>();
learning_sequence.add_learning_method(new_learning_method);
auto new_learning_method2 = std::make_shared<l4n::ParticleSwarm>(l4n::ParticleSwarm(&domain_bounds,
1.711897,
1.711897,
0.711897,
0.5,
0.3,
0.7,
n_particles_swarm,
max_n_iters_swarm));
auto new_learning_method3 = std::make_shared<l4n::LevenbergMarquardt>(l4n::LevenbergMarquardt(max_n_iters_gradient_lm,
batch_size,
prec_lm));
learning_sequence.add_learning_method(new_learning_method3);
/* Complex Optimization */
learning_sequence.optimize(mse1); // Network training
/* Save Neural network parameters to file */
sim.save_text("test_net_Gradient_Descent.4n");
/* PHASE 4 - TESTING DATA */
/* Output file specification */
std::string filename = "simulator_output.txt";
std::ofstream output_file(filename);
if (!output_file.is_open()) {
throw std::runtime_error("File '" + filename + "' can't be opened!");
}
/* Neural network loading */
l4n::NeuralNetwork nn3("test_net_Gradient_Descent.4n");
/* Check of the saved network - write to the file */
output_file << std::endl << "The loaded network info:" << std::endl;
nn3.write_stats(&output_file);
nn3.write_weights(&output_file);
nn3.write_biases(&output_file);
/* Evaluate network on an arbitrary data-set and save results into the file */
l4n::CSVReader reader3("/home/fluffymoo/Dropbox/data_BACK_RH_1.csv",
";",
true); // File, separator, skip 1st line
reader3.read(); // Read from the file
/* Create data set for both the testing of the neural network */
/* Specify which columns are inputs or outputs */
std::shared_ptr<l4n::DataSet> ds3 = reader3.get_data_set(&inputs,
&outputs); // Creation of data-set for NN
if (normalize_data) {
ds3.operator->()->set_normalization_strategy(new DoubleUnitStrategy());
ds3.get()->normalize(); // Normalization of data to prevent numerical problems
}
output_file << std::endl << "Evaluating network on the dataset: " << std::endl;
ds3->store_data_text(&output_file);
output_file << "Output and the error:" << std::endl;
/* Error function */
l4n::MSE mse3(&nn3,
ds3.get()); // First parameter - neural network, second parameter - data-set
mse3.eval_on_data_set(ds3.get(),
&output_file,
nullptr,
normalize_data,
true);
/* Close the output file for writing */
output_file.close();
return 0;
}
catch (const std::exception& e) {
std::cerr << e.what() << std::endl;
exit(EXIT_FAILURE);
}
}
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment