From 424990dbed189a341a60548c0ac13a2533399a1b Mon Sep 17 00:00:00 2001
From: Martin Beseda <martin.beseda@vsb.cz>
Date: Sun, 6 Jan 2019 09:42:14 +0100
Subject: [PATCH] ENH: Several improvements made in Simulator example.

---
 src/examples/simulator.cpp | 75 ++++++++++++++++++++++++++------------
 1 file changed, 51 insertions(+), 24 deletions(-)

diff --git a/src/examples/simulator.cpp b/src/examples/simulator.cpp
index cfae4031..b4aa0abe 100644
--- a/src/examples/simulator.cpp
+++ b/src/examples/simulator.cpp
@@ -29,6 +29,13 @@ int main(int argc, char** argv){
     l4n::CSVReader reader("/home/martin/Desktop/data_Heaviside.txt", "\t", true);  // File, separator, skip 1st line
     reader.read();  // Read from the file
 
+    /* Open file for writing */
+    std::string filename = "simulator_output.txt";
+    std::ofstream output_file(filename);
+    if(!output_file.is_open()) {
+        throw std::runtime_error("File '" + filename + "' can't be opened!");
+    }
+
     /* Create data set for both the training and testing of the neural network */
     std::vector<unsigned int> inputs = { 3 };  // Possible multiple inputs, e.g. {0,3}
     std::vector<unsigned int> outputs = { 1 };  // Possible multiple outputs, e.g. {1,2}
@@ -83,7 +90,7 @@ int main(int argc, char** argv){
     // 1) Threshold for the successful ending of the optimization - deviation from minima
     // 2) Number of iterations to reset step size to tolerance/10.0
     // 3) Maximal number of iterations - optimization will stop after that, even if not converged
-    l4n::GradientDescent gs(1e-3, 100, 10);
+    l4n::GradientDescent gs(1e-3, 100, 200);
 
     // Weight and bias randomization in the network according to the uniform distribution
     // Calling methods nn.randomize_weights() and nn.randomize_biases()
@@ -97,21 +104,37 @@ int main(int argc, char** argv){
     /* Cross - validation */
     l4n::CrossValidator cv(&gs, &mse);
 
-    // Parameters: 1) Number of data-set parts used for CV, 2) Number of tests performed
-    cv.run_k_fold_test(10, 1);
+    // Parameters:
+    // 1) Number of data-set parts used for CV
+    // 2) Number of tests performed
+    // 3) File-path to the files with data from cross-validation (one CV run - one file)
+    cv.run_k_fold_test(10, 3, &output_file);
 
     /* Save network to the text file */
     nn.save_text("test_net.4n");
 
-    /* Check of the saved network */
+    /* Check of the saved network - print to STDOUT */
     std::cout << std::endl << "The original network info:" << std::endl;
-    nn.print_stats();
-    nn.print_weights();
+    nn.write_stats();
+    nn.write_weights();
+    nn.write_biases();
 
     l4n::NeuralNetwork nn_loaded("test_net.4n");
     std::cout << std::endl << "The loaded network info:" << std::endl;
-    nn_loaded.print_stats();
-    nn_loaded.print_weights();
+    nn_loaded.write_stats();
+    nn_loaded.write_weights();
+    nn_loaded.write_biases();
+
+    /* Check of the saved network - write to the file */
+    output_file << std::endl << "The original network info:" << std::endl;
+    nn.write_stats(&output_file);
+    nn.write_weights(&output_file);
+    nn.write_biases(&output_file);
+
+    output_file << std::endl << "The loaded network info:" << std::endl;
+    nn_loaded.write_stats(&output_file);
+    nn_loaded.write_weights(&output_file);
+    nn_loaded.write_biases(&output_file);
 
     /* Example of evaluation of a single input, normalized input, de-normalized output */
     std::vector<double> input_norm(ds.get_input_dim()),
@@ -131,25 +154,29 @@ int main(int argc, char** argv){
     ds.de_normalize_single(input_norm, input);
     ds.de_normalize_single(expected_output_norm, expected_output);
 
-    std::cout << std::endl << "input: ";
-    for (auto el: input_norm) { std::cout << el << ", "; }
-    std::cout << std::endl;
-    std::cout << "output: ";
-    for (auto el: output) { std::cout << el << ", "; }
-    std::cout << std::endl;
-    std::cout << "expected output: ";
-    for (auto el: expected_output) { std::cout << el << ", "; }
-    std::cout << std::endl;
+    /* Evaluate network on an arbitrary data-set and save results into the file */
+    l4n::DataSet ds2;
+
+    std::vector<double> inp, out;
+    for(double i = 0; i < 5; i++) {
+        inp = {i};
+        out = {i+2};
+
+        ds2.add_data_pair(inp, out);
+    }
+
+    output_file << std::endl << "Evaluating network on the dataset: " << std::endl;
+    ds2.store_data_text(&output_file);
+
+    output_file << "Output and the error:" << std::endl;
+    mse.eval_on_data_set(&ds2, &output_file);
+
+    /* Close the output file for writing */
+    output_file.close();
 
     return 0;
 
-  } catch(const std::runtime_error& e) {
-    std::cerr << e.what() << std::endl;
-    exit(EXIT_FAILURE);
-  } catch(const std::out_of_range& e) {
-    std::cerr << e.what() << std::endl;
-    exit(EXIT_FAILURE);
-  } catch(const std::invalid_argument& e) {
+  } catch(const std::exception& e) {
     std::cerr << e.what() << std::endl;
     exit(EXIT_FAILURE);
   }
-- 
GitLab
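
Note on the calling convention used by this patch: each write_* call is issued twice, once with no argument (report goes to STDOUT) and once with &output_file (report goes to simulator_output.txt). The sketch below illustrates that pattern in isolation, assuming a hypothetical Reporter class with a defaulted std::ofstream* parameter; it is not lib4neuro code, only a minimal stand-in that mirrors write_stats() vs. write_stats(&output_file).

// Minimal standalone sketch; Reporter and its write_stats() are hypothetical
// placeholders for the pattern used in simulator.cpp above.
#include <cstdlib>
#include <fstream>
#include <iostream>
#include <stdexcept>
#include <string>

class Reporter {
public:
    // Writes to the supplied file stream, or to STDOUT when file == nullptr.
    void write_stats(std::ofstream* file = nullptr) const {
        std::ostream& out = file ? static_cast<std::ostream&>(*file) : std::cout;
        out << "stats: example placeholder" << std::endl;
    }
};

int main() {
    try {
        std::string filename = "simulator_output.txt";
        std::ofstream output_file(filename);
        if(!output_file.is_open()) {
            throw std::runtime_error("File '" + filename + "' can't be opened!");
        }

        Reporter r;
        r.write_stats();              // goes to STDOUT
        r.write_stats(&output_file);  // goes to simulator_output.txt

        output_file.close();
        return 0;
    } catch(const std::exception& e) {
        std::cerr << e.what() << std::endl;
        return EXIT_FAILURE;
    }
}

Passing the stream pointer rather than duplicating the formatting code lets the example emit the same network report to the console and to the output file with a single routine.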