/**
 * Example: setting up an l4n::Simulator for measured process data with a single valve.
 *
 * @author Michal Kravčenko
 * @date 15.3.19 -
 */


#include <iostream>
#include <cstdio>
#include <fstream>
#include <vector>
#include <utility>
#include <algorithm>
#include <cassert>

#include "4neuro.h"
#include "../LearningMethods/RandomSolution.h"

int main(int argc, char** argv) {

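    /* Optimization settings; apart from normalize_data, these are only referenced by the
     * commented-out training code further below (and a few are currently unused). */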
    // NOTE: normalization is necessary, and keep in mind that it affects the time shifts
    bool normalize_data = false;
    double prec = 1e-9;
    double prec_lm = 1e-9;
    int restart_interval = 500;
    int max_n_iters_gradient = 10000;
    int max_n_iters_gradient_lm = 1;
    int max_n_iters_swarm = 200;
    int n_particles_swarm = 200;
    unsigned long batch_size = 0;
    int max_number_of_cycles = 1;
    try {
        /* PHASE 1 - TRAINING DATA LOADING, SIMPLE SIMULATION */
        l4n::CSVReader reader1("/home/martin/Desktop/ANN_MV_process_Data.csv", ";", true);  // File, separator, skip 1st line
        reader1.read();  // Read from the file

        /* PHASE 2 - NEURAL NETWORK SPECIFICATION */
        /* Create a data set for the first training of the neural network */
        /* Specify which columns are inputs or outputs */
        std::vector<unsigned int> inputs = { 0 };  // Possible multiple inputs, e.g. {0,3}, column indices starting from 0
        std::vector<unsigned int> outputs = { 2 };  // Possible multiple outputs, e.g. {1,2}
        std::shared_ptr<l4n::DataSet> ds1 = reader1.get_data_set(&inputs, &outputs);  // Creation of data-set for NN
        if (normalize_data) {
            ds1->normalize();  // Normalization of data to prevent numerical problems
        }

        /* Numbers of neurons in layers (including input and output layers) */
        std::vector<size_t> neuron_numbers_in_layers = { 1, 4, 4, 1 };

        /* for each valve (1 in this example) setup the times of change */
        std::vector<std::vector<double>> t;
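        // The arguments of get_normalized_value() are presumably the raw change times (here 0 and 100)
        // mapped into the data set's normalized scale; this is why the note above warns that
        // normalization affects the time shifts.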
        t.push_back({ds1->get_normalized_value(0), ds1->get_normalized_value(100)});

        /* for each valve (1 in this example) setup the magnitudes of change */
        std::vector<std::vector<double>> xi;
//        xi.push_back({ds1.get_data()->at(0).second});
        xi.push_back({1.0, 5.0});
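        // Presumably one magnitude per change time listed in t above (two values for the two change times).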

        /* The simulator object */
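        // Constructor arguments: the number of simulated outputs (outputs.size(), 1 here), the neuron
        // counts per layer, and the per-valve change times (t) and change magnitudes (xi) prepared above.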
        l4n::Simulator sim(outputs.size(), neuron_numbers_in_layers, t, xi);

        /* PHASE 3 - TRAINING (currently commented out) */
        /* Error function */
//        l4n::MSE mse1(&sim, &ds1);  // First parameter - neural network, second parameter - data-set
//
//        /* Particle Swarm method domain*/
//        std::vector<double> domain_bounds(2 * (sim.get_n_weights() + sim.get_n_biases()));
//        for (size_t i = 0; i < domain_bounds.size() / 2; ++i) {
//            domain_bounds[2 * i] = -0.1;
//            domain_bounds[2 * i + 1] = 0.1;
//        }
//
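//        /* The positional arguments are presumably the PSO coefficients c1, c2 and the inertia
//         * weight w, followed by the swarm's stopping-criterion parameters; the last two are the
//         * particle count (n_particles_swarm) and the iteration limit (max_n_iters_swarm).
//         * Check the ParticleSwarm constructor for the exact meaning of each value. */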
//        l4n::ParticleSwarm ps(&domain_bounds,
//                              1.711897,
//                              1.711897,
//                              0.711897,
//                              0.5,
//                              0.3,
//                              0.7,
//                              n_particles_swarm,
//                              max_n_iters_swarm);
//        l4n::RandomSolution rnd;
//        l4n::LevenbergMarquardt leven(max_n_iters_gradient_lm, batch_size, prec_lm );
//
//        l4n::LearningSequence learning_sequence( 1e-6, max_number_of_cycles );
//        learning_sequence.add_learning_method( &rnd );
////        learning_sequence.add_learning_method( &ps );
//        learning_sequence.add_learning_method( &leven );
//
//
//        /* Complex Optimization */
//        learning_sequence.optimize(mse1);  // Network training

        /* Save Neural network parameters to file */
//        sim.save_text("test_net_Gradient_Descent.4n");

        /* PHASE 4 - TESTING DATA */

//        /* Output file specification */
//        std::string filename = "simulator_output.txt";
//        std::ofstream output_file(filename);
//        if (!output_file.is_open()) {
//            throw std::runtime_error("File '" + filename + "' can't be opened!");
//        }
        /* Neural network loading */
//        l4n::NeuralNetwork nn3("test_net_Gradient_Descent.4n");
//        /* Check of the saved network - write to the file */
//        output_file << std::endl << "The loaded network info:" << std::endl;
//        nn3.write_stats(&output_file);
//        nn3.write_weights(&output_file);
//        nn3.write_biases(&output_file);
////
////        /* Evaluate network on an arbitrary data-set and save results into the file */
//        l4n::CSVReader reader3("/home/martin/Desktop/ANN_MV_process_Data.csv", ";", true);  // File, separator, skip 1st line
//        reader3.read();  // Read from the file
////
////        /* Create a data set for the testing of the neural network */
////        /* Specify which columns are inputs or outputs */
////
//        l4n::DataSet ds3 = reader3.get_data_set(&inputs, &outputs);  // Creation of data-set for NN
//        if(normalize_data){
//            ds3.normalize();  // Normalization of data to prevent numerical problems
//        }
////
////        output_file << std::endl << "Evaluating network on the dataset: " << std::endl;
////        ds3.store_data_text(&output_file);
////
//        output_file << "Output and the error:" << std::endl;
////
////        /* Error function */
//        l4n::MSE mse3(&nn3, &ds3);  // First parameter - neural network, second parameter - data-set
//        mse3.eval_on_data_set(&ds3, &output_file, nullptr, normalize_data, true);
//        /* Close the output file for writing */
//        output_file.close();

        return 0;

    }
    catch (const std::exception& e) {
        std::cerr << e.what() << std::endl;
        exit(EXIT_FAILURE);
    }

}