/**
 * Implementation of the NeuralNetwork class
 *
 * @author Michal Kravčenko
 * @date 13.6.18 -
 */

#include <algorithm>
#include <ctime>
#include <fstream>
#include <iostream>

#include <boost/random/mersenne_twister.hpp>
#include <boost/random/uniform_real_distribution.hpp>
#include <boost/archive/text_iarchive.hpp>
#include <boost/archive/text_oarchive.hpp>

/* NOTE: the include list is truncated in this copy; the standard and Boost
 * headers above were restored from usage, and the remaining project headers
 * (NeuralNetwork.h, the Neuron class headers, the exception and debug macros)
 * are assumed to be pulled in elsewhere. */
#include "../NetConnection/ConnectionFunctionConstant.h"
#include "../mpi_wrapper.h"

namespace lib4neuro {
    int network_evaluation_counter      = 0;
    int network_backpropagation_counter = 0;
}

namespace lib4neuro {

    NeuralNetwork::NeuralNetwork() {
        this->delete_weights  = true;
        this->delete_biases   = true;
    }

    NeuralNetwork::NeuralNetwork(std::string filepath) {
        /* body truncated in this copy; reconstructed to delegate to init_from_file() below */
        this->init_from_file(filepath);
    }

    NeuralNetwork* NeuralNetwork::get_subnet(::std::vector<size_t>& input_neuron_indices,
                                             ::std::vector<size_t>& output_neuron_indices) {
        NeuralNetwork* output_net = nullptr;

        // TODO rework due to the changed structure of the class

        return output_net;
    }

    size_t NeuralNetwork::add_neuron(std::shared_ptr<Neuron> n,
                                     BIAS_TYPE bt,
                                     size_t bias_idx) {
        if (bt == BIAS_TYPE::NO_BIAS) {
            this->neuron_bias_indices.push_back(-1);
        } else if (bt == BIAS_TYPE::NEXT_BIAS) {
            this->neuron_bias_indices.push_back((int) this->neuron_biases.size());
            this->neuron_biases.resize(this->neuron_biases.size() + 1);
        } else if (bt == BIAS_TYPE::EXISTING_BIAS) {
            if (bias_idx >= this->neuron_biases.size()) {
                THROW_RUNTIME_ERROR("The supplied bias index is too large!");
            }

            this->neuron_bias_indices.push_back((int) bias_idx);
        }

        this->outward_adjacency.push_back(std::make_shared<std::vector<std::pair<size_t, size_t>>>(::std::vector<std::pair<size_t, size_t>>(0)));
        this->inward_adjacency.push_back(std::make_shared<std::vector<std::pair<size_t, size_t>>>(::std::vector<std::pair<size_t, size_t>>(0)));

        this->neurons.push_back(n);
        return this->neurons.size() - 1;
    }
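
    /* Usage sketch (added for illustration; not part of the original file):
     * registering neurons under the three BIAS_TYPE modes. The network and
     * neuron choices below are assumptions.
     *
     *     NeuralNetwork net;
     *     std::shared_ptr<Neuron> n1(new NeuronLinear());
     *     size_t i1 = net.add_neuron(n1, BIAS_TYPE::NO_BIAS);          // bias index -1
     *     std::shared_ptr<Neuron> n2(new NeuronLogistic());
     *     size_t i2 = net.add_neuron(n2, BIAS_TYPE::NEXT_BIAS);        // allocates a new bias
     *     std::shared_ptr<Neuron> n3(new NeuronLogistic());
     *     size_t i3 = net.add_neuron(n3, BIAS_TYPE::EXISTING_BIAS, 0); // shares bias 0
     */
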
    void NeuralNetwork::eval_single_debug(::std::vector<double>& input,
                                          ::std::vector<double>& output,
                                          std::vector<double>* custom_weights_and_biases) {
        if ((this->input_neuron_indices.size() * this->output_neuron_indices.size()) <= 0) {
            THROW_INVALID_ARGUMENT_ERROR("Input and output neurons have not been specified!");
        }

        if (this->input_neuron_indices.size() != input.size()) {
            THROW_INVALID_ARGUMENT_ERROR("Data input size != Network input size");
        }

        if (this->output_neuron_indices.size() != output.size()) {
            THROW_INVALID_ARGUMENT_ERROR("Data output size != Network output size");
        }

        lib4neuro::network_evaluation_counter++;

        double potential, bias;
        int    bias_idx;

        this->copy_parameter_space(custom_weights_and_biases);

        this->analyze_layer_structure();

        /* reset of the output and the neuron potentials */
        ::std::fill(output.begin(),
                    output.end(),
                    0.0);
        ::std::fill(this->neuron_potentials.begin(),
                    this->neuron_potentials.end(),
                    0.0);

        /* set the potentials of the input neurons */
        for (size_t i = 0; i < this->input_neuron_indices.size(); ++i) {
            this->neuron_potentials.at(this->input_neuron_indices.at(i)) = input[i];
            std::cout << this->neuron_potentials.at(this->input_neuron_indices.at(i)) << ", ";
        }
        std::cout << std::endl;



        /* we iterate through all the feed-forward layers and transfer the signals */
        for (auto layer: this->neuron_layers_feedforward) {
            /* we iterate through all neurons in this layer and propagate the signal to the neighboring neurons */

            for (auto si: *layer) {
                bias      = 0.0;
                bias_idx  = this->neuron_bias_indices.at(si);
                if (bias_idx >= 0) {
                    bias = this->neuron_biases.at(bias_idx);
                }

                potential = this->neurons.at(si)->activate(this->neuron_potentials.at(si),
                                                           bias);
                std::cout << "Neuron" << si << " (" << this->neuron_potentials.at(si) << " - " << bias << ") -> (" << potential << ")" << std::endl;
//                std::cout << "  applying bias: " << bias << " to neuron potential: " << this->neuron_potentials.at(si)
//                          << " -> " << potential << std::endl;
                for (auto c: *this->outward_adjacency.at(si)) {
                    size_t ti = c.first;
                    size_t ci = c.second;

                    this->neuron_potentials.at(ti) +=
                        this->connection_list.at(ci)->eval(this->connection_weights) * potential;
                    std::cout << "    EDGE(" << si << ", " << ti << ")" << this->connection_list.at(ci)->eval(this->connection_weights) << std::endl;
//                    std::cout << "  adding input to neuron " << ti << " += "
//                              << this->connection_list.at(ci)->eval(this->connection_weights) << "*" << potential
//                              << std::endl;
                }
            }
        }

        unsigned int i = 0;
        for (auto oi: this->output_neuron_indices) {
            bias     = 0.0;
            bias_idx = this->neuron_bias_indices.at(oi);
            if (bias_idx >= 0) {
                bias = this->neuron_biases.at(bias_idx);
            }

            output[i] = this->neurons.at(oi)->activate(this->neuron_potentials.at(oi),
                                                       bias);
//            std::cout << "setting the output[" << i << "] = " << output[i] << "(bias = " << bias << ")" << std::endl;
            ++i;
        }
    }

    size_t NeuralNetwork::add_connection_simple(size_t n1_idx,
                                                size_t n2_idx,
                                                SIMPLE_CONNECTION_TYPE sct,
                                                size_t weight_idx) {
        std::shared_ptr<ConnectionFunctionIdentity> con_weight_u1u2;
        if (sct == SIMPLE_CONNECTION_TYPE::UNITARY_WEIGHT) {
            con_weight_u1u2 = std::make_shared<ConnectionFunctionIdentity>(ConnectionFunctionIdentity());
        } else {
            if (sct == SIMPLE_CONNECTION_TYPE::NEXT_WEIGHT) {
                weight_idx = this->connection_weights.size();
                this->connection_weights.resize(weight_idx + 1);
            } else if (sct == SIMPLE_CONNECTION_TYPE::EXISTING_WEIGHT) {
                if (weight_idx >= this->connection_weights.size()) {
                    ::std::cerr << "The supplied connection weight index is too large!" << ::std::endl;
                }
            }

            con_weight_u1u2 = std::make_shared<ConnectionFunctionIdentity>(ConnectionFunctionIdentity(weight_idx));
        }

        size_t conn_idx = this->add_new_connection_to_list(con_weight_u1u2);
        this->add_outward_connection(n1_idx,
                                     n2_idx,
                                     conn_idx);
        this->add_inward_connection(n2_idx,
                                    n1_idx,
                                    conn_idx);
        return this->connection_list.size() - 1;
    }
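
    /* Usage sketch (added for illustration): wiring the neurons from the
     * add_neuron() example above; the indices i1, i2, i3 are assumptions.
     *
     *     // allocate a fresh trainable weight for the edge i1 -> i2
     *     net.add_connection_simple(i1, i2, SIMPLE_CONNECTION_TYPE::NEXT_WEIGHT);
     *     // reuse weight 0 on the edge i1 -> i3 (weight sharing)
     *     net.add_connection_simple(i1, i3, SIMPLE_CONNECTION_TYPE::EXISTING_WEIGHT, 0);
     *     // fixed, non-trainable edge with weight 1.0
     *     net.add_connection_constant(i2, i3, 1.0);
     */
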
    size_t NeuralNetwork::add_connection_constant(size_t n1_idx,
                                                  size_t n2_idx,
                                                  double weight) {
        std::shared_ptr<ConnectionFunctionConstant> cfc = std::make_shared<ConnectionFunctionConstant>(ConnectionFunctionConstant(weight));
        size_t conn_idx = this->add_new_connection_to_list(cfc);
        this->add_outward_connection(n1_idx,
                                     n2_idx,
                                     conn_idx);
        this->add_inward_connection(n2_idx,
                                    n1_idx,
                                    conn_idx);

        return conn_idx;
    }

    void NeuralNetwork::add_existing_connection(size_t n1_idx,
                                                size_t n2_idx,
                                                size_t connection_idx,
                                                NeuralNetwork& parent_network) {
        size_t conn_idx = this->add_new_connection_to_list(parent_network.connection_list.at(connection_idx));
        this->add_outward_connection(n1_idx,
                                     n2_idx,
                                     conn_idx);
        this->add_inward_connection(n2_idx,
                                    n1_idx,
                                    conn_idx);
    }

    void NeuralNetwork::copy_parameter_space(std::vector<double>* parameters) {
        if (parameters != nullptr) {
            for (unsigned int i = 0; i < this->connection_weights.size(); ++i) {
                this->connection_weights.at(i) = (*parameters).at(i);
            }

            for (unsigned int i = 0; i < this->neuron_biases.size(); ++i) {
                (this->neuron_biases).at(i) = (*parameters).at(i + this->connection_weights.size());
            }
        }
    }
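
    /* Note (added for clarity): a flattened parameter vector is interpreted as
     * [ w_0, ..., w_{n_weights-1}, b_0, ..., b_{n_biases-1} ]: all connection
     * weights first, then all neuron biases, matching the two loops above. */
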
    void NeuralNetwork::set_parameter_space_pointers(NeuralNetwork& parent_network) {
        if (!this->connection_weights.empty()) {
            this->connection_weights.clear();
        }

        this->neuron_biases.clear();

        this->connection_weights = parent_network.connection_weights;
        this->neuron_biases      = parent_network.neuron_biases;

        this->delete_weights = false;
        this->delete_biases  = false;
    }

    void NeuralNetwork::eval_single(::std::vector<double>& input,
                                    ::std::vector<double>& output,
                                    std::vector<double>* custom_weights_and_biases) {
        if ((this->input_neuron_indices.size() * this->output_neuron_indices.size()) <= 0) {
            THROW_INVALID_ARGUMENT_ERROR("Input and output neurons have not been specified!");
        }

        if (this->input_neuron_indices.size() != input.size()) {
            THROW_INVALID_ARGUMENT_ERROR("Network input size(" + std::to_string(this->input_neuron_indices.size())
                                         + ") != Data input size(" + std::to_string(input.size()) + ")");
        }

        if (this->output_neuron_indices.size() != output.size()) {
            THROW_INVALID_ARGUMENT_ERROR("Data output size != Network output size");
        }

        lib4neuro::network_evaluation_counter++;

        double potential, bias;
        int    bias_idx;

        this->copy_parameter_space(custom_weights_and_biases); // TODO rewrite, so the original parameters are not edited!

        this->analyze_layer_structure();

        /* reset of the output and the neuron potentials */
        ::std::fill(output.begin(),
                    output.end(),
                    0.0);
        ::std::fill(this->neuron_potentials.begin(),
                    this->neuron_potentials.end(),
                    0.0);
        /* set the potentials of the input neurons */
        for (size_t i = 0; i < this->input_neuron_indices.size(); ++i) {
            this->neuron_potentials.at(this->input_neuron_indices.at(i)) = input[i];
        }

        /* we iterate through all the feed-forward layers and transfer the signals */
        for (auto layer: this->neuron_layers_feedforward) {
            /* we iterate through all neurons in this layer and propagate the signal to the neighboring neurons */

            for (auto si: *layer) {
                bias      = 0.0;
                bias_idx  = this->neuron_bias_indices.at(si);
                if (bias_idx >= 0) {
                    bias = this->neuron_biases.at(bias_idx);
                }

                potential = this->neurons.at(si)->activate(this->neuron_potentials.at(si),
                                                           bias);

                for (auto c: *this->outward_adjacency.at(si)) {
                    size_t ti = c.first;
                    size_t ci = c.second;

                    this->neuron_potentials.at(ti) +=
                        this->connection_list.at(ci)->eval(this->connection_weights) * potential;
                }
            }
        }

        unsigned int i = 0;
        for (auto oi: this->output_neuron_indices) {
            bias     = 0.0;
            bias_idx = this->neuron_bias_indices.at(oi);
            if (bias_idx >= 0) {
                bias = this->neuron_biases.at(bias_idx);
            }

            output[i] = this->neurons.at(oi)->activate(this->neuron_potentials.at(oi),
                                                       bias);
            ++i;
        }
    }
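
    /* Usage sketch (added for illustration): a single forward pass. The sizes
     * assume a network with two inputs and one output; adjust to your topology.
     *
     *     std::vector<double> in  = {0.5, -1.0};
     *     std::vector<double> out(1);
     *     net.eval_single(in, out);           // use the network's own parameters
     *     net.eval_single(in, out, &params);  // or an external parameter vector
     */
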
    void NeuralNetwork::add_to_gradient_single(std::vector<double>& input,
                                               ::std::vector<double>& error_derivative,
                                               double error_scaling,
                                               ::std::vector<double>& gradient) {
        lib4neuro::network_backpropagation_counter++;

        ::std::vector<double> scaling_backprog(this->get_n_neurons());
        ::std::fill(scaling_backprog.begin(),
                    scaling_backprog.end(),
                    0.0);
        size_t bias_shift = this->get_n_weights();
        size_t neuron_idx;
        int    bias_idx;
        double neuron_potential, neuron_potential_t, neuron_bias, connection_weight;
        NeuronDifferentiable* active_neuron;
        /* initial error propagation */
        std::shared_ptr<::std::vector<size_t>> current_layer = this->neuron_layers_feedforward.at(
            this->neuron_layers_feedforward.size() - 1);
        //TODO might not work in the future as the output neurons could be permuted
        for (size_t i = 0; i < current_layer->size(); ++i) {
            neuron_idx = current_layer->at(i);
            scaling_backprog[neuron_idx] = error_derivative[i] * error_scaling;
        }

        /* we iterate through all the layers in reverse order and calculate partial derivatives scaled correspondingly */
        for (size_t j = this->neuron_layers_feedforward.size(); j > 0; --j) {
            current_layer = this->neuron_layers_feedforward.at(j - 1);
            for (size_t i = 0; i < current_layer->size(); ++i) {
                neuron_idx    = current_layer->at(i);
                active_neuron = dynamic_cast<NeuronDifferentiable*> (this->neurons.at(neuron_idx).get());
                if (active_neuron) {
                    bias_idx         = this->neuron_bias_indices.at(neuron_idx);
                    neuron_potential = this->neuron_potentials.at(neuron_idx);
                    if (bias_idx >= 0) {
                        neuron_bias = this->neuron_biases.at(bias_idx);
                        gradient[bias_shift + bias_idx] += scaling_backprog[neuron_idx] *
                                                           active_neuron->activation_function_eval_derivative_bias(
                                                               neuron_potential,
                                                               neuron_bias);
                        scaling_backprog[neuron_idx] *= active_neuron->activation_function_eval_derivative(
                            neuron_potential,
                            neuron_bias);
                    }

                    /* connections to lower level neurons */
                    for (auto c: *this->inward_adjacency.at(neuron_idx)) {
                        size_t ti = c.first;
                        size_t ci = c.second;

                        neuron_potential_t = this->neurons.at(ti)->get_last_activation_value();
                        connection_weight  = this->connection_list.at(ci)->eval(this->connection_weights);
                        this->connection_list.at(ci)->eval_partial_derivative(*this->get_parameter_ptr_weights(),
                                                                              gradient,
                                                                              neuron_potential_t *
                                                                              scaling_backprog[neuron_idx]);
                        scaling_backprog[ti] += scaling_backprog[neuron_idx] * connection_weight;
                    }
                } else {
                    THROW_INVALID_ARGUMENT_ERROR(
                        "Neuron used in backpropagation does not contain differentiable activation function!\n");
                }
            }
        }
    }
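
    /* Usage sketch (added for illustration): accumulating the gradient of a
     * squared error e = (f(x) - y)^2 for a single-output network; x, y and f_x
     * are assumptions. The gradient uses the [weights | biases] layout noted at
     * copy_parameter_space(), which is why bias entries start at bias_shift.
     *
     *     std::vector<double> grad(net.get_n_weights() + net.get_n_biases(), 0.0);
     *     std::vector<double> de = {2.0 * (f_x - y)};   // de/df for the single output
     *     net.add_to_gradient_single(x, de, 1.0, grad);
     */
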
    void NeuralNetwork::add_to_gradient_single_debug(std::vector<double>& input,
                                                     ::std::vector<double>& error_derivative,
                                                     double error_scaling,
                                                     ::std::vector<double>& gradient) {
        lib4neuro::network_backpropagation_counter++;

        ::std::vector<double> scaling_backprog(this->get_n_neurons());
        ::std::fill(scaling_backprog.begin(),
                    scaling_backprog.end(),
                    0.0);

        size_t bias_shift = this->get_n_weights();
        size_t neuron_idx;
        int    bias_idx;
        double neuron_potential, neuron_activation_t, neuron_bias, connection_weight;

        NeuronDifferentiable* active_neuron;
        std::shared_ptr<::std::vector<size_t>> current_layer = this->neuron_layers_feedforward.at(
            this->neuron_layers_feedforward.size() - 1);
        //TODO might not work in the future as the output neurons could be permuted
        std::cout << "Error scaling on the output layer: ";
        for (size_t i = 0; i < current_layer->size(); ++i) {
            neuron_idx = current_layer->at(i);
            scaling_backprog[neuron_idx] = error_derivative[i] * error_scaling;

            std::cout << scaling_backprog[neuron_idx] << " [neuron " << neuron_idx << "], ";
        }
        std::cout << std::endl;

        /* we iterate through all the layers in reverse order and calculate partial derivatives scaled correspondingly */
        for (size_t j = this->neuron_layers_feedforward.size(); j > 0; --j) {
            current_layer = this->neuron_layers_feedforward.at(j - 1);

            for (size_t i = 0; i < current_layer->size(); ++i) {

                neuron_idx    = current_layer->at(i);
                active_neuron = dynamic_cast<NeuronDifferentiable*> (this->neurons.at(neuron_idx).get());

                if (active_neuron) {
                    std::cout << "  [backpropagation] active neuron: " << neuron_idx << std::endl;

                    bias_idx         = this->neuron_bias_indices.at(neuron_idx);
                    neuron_potential = this->neuron_potentials.at(neuron_idx);
                    if (bias_idx >= 0) {
                        neuron_bias = this->neuron_biases.at(bias_idx);
                        gradient[bias_shift + bias_idx] += scaling_backprog[neuron_idx] *
                                                           active_neuron->activation_function_eval_derivative_bias(
                                                               neuron_potential,
                                                               neuron_bias);
                        scaling_backprog[neuron_idx] *= active_neuron->activation_function_eval_derivative(
                            neuron_potential,
                            neuron_bias);
                    }

                    std::cout << "      [backpropagation] scaling coefficient: " << scaling_backprog[neuron_idx]
                              << std::endl;
                    for (auto c: *this->inward_adjacency.at(neuron_idx)) {
                        size_t ti = c.first;
                        size_t ci = c.second;

                        neuron_activation_t = this->neurons.at(ti)->get_last_activation_value();
                        connection_weight   = this->connection_list.at(ci)->eval(this->connection_weights);
                        std::cout << "      [backpropagation] value (" << ti << "): " << neuron_activation_t
                                  << ", scaling: " << scaling_backprog[neuron_idx] << std::endl;
                        this->connection_list.at(ci)->eval_partial_derivative(*this->get_parameter_ptr_weights(),
                                                                              gradient,
                                                                              neuron_activation_t *
                                                                              scaling_backprog[neuron_idx]);

                        scaling_backprog[ti] += scaling_backprog[neuron_idx] * connection_weight;
                    }
                } else {
                    THROW_INVALID_ARGUMENT_ERROR(
                        "Neuron used in backpropagation does not contain differentiable activation function!\n");
                }
            }
        }
    }

    void NeuralNetwork::randomize_weights() {
        if( lib4neuro::mpi_rank == 0 ){
            boost::random::mt19937 gen(std::time(0));
//            boost::random::mt19937 gen(0);
            // Init weight guess ("optimal" for logistic activation functions)
            double r = 4 * sqrt(6. / (this->connection_weights.size()));
            boost::random::uniform_real_distribution<> dist(-r,r);
            for (size_t i = 0; i < this->connection_weights.size(); i++) {
                this->connection_weights.at(i) = dist(gen);
            }

        }

        MPI_Bcast(
            &this->connection_weights[0],
            this->connection_weights.size(),
            MPI_DOUBLE,
            0,
            lib4neuro::mpi_active_comm
        );
    }
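
    /* Note (added for clarity): the radius r = 4 * sqrt(6 / n) above follows the
     * Glorot-Bengio uniform initialization heuristic for logistic activations
     * (the factor 4 is the usual sigmoid correction), simplified here to use the
     * total number of weights n instead of per-layer fan-in + fan-out. */
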
    void NeuralNetwork::randomize_biases() {
        if( lib4neuro::mpi_rank == 0 ){
            boost::random::mt19937 gen(std::time(0));
//            boost::random::mt19937 gen(0);
            // Init bias guess
            boost::random::uniform_real_distribution<> dist(-1, 1);
            for (size_t i = 0; i < this->neuron_biases.size(); i++) {
                this->neuron_biases.at(i) = dist(gen);
            }

        }

        MPI_Bcast(
            &this->neuron_biases[0],
            this->neuron_biases.size(),
            MPI_DOUBLE,
            0,
            lib4neuro::mpi_active_comm
        );
    }

    void NeuralNetwork::randomize_parameters() {
        this->randomize_biases();
        this->randomize_weights();
    }

    void NeuralNetwork::scale_biases(double alpha) {
        for (size_t i = 0; i < this->get_n_biases(); ++i) {
            this->neuron_biases.at(i) *= alpha;
        }
    }

    void NeuralNetwork::scale_weights(double alpha) {
        for (size_t i = 0; i < this->get_n_weights(); ++i) {
            this->connection_weights.at(i) *= alpha;
        }
    }

    void NeuralNetwork::scale_parameters(double alpha) {
        this->scale_biases(alpha);
        this->scale_weights(alpha);
    }

    size_t NeuralNetwork::get_n_inputs() {
        return this->input_neuron_indices.size();
    }

    size_t NeuralNetwork::get_n_outputs() {
        return this->output_neuron_indices.size();
    }

    size_t NeuralNetwork::get_n_weights() {
        return this->connection_weights.size();
    }

    size_t NeuralNetwork::get_n_biases() {
        return this->neuron_biases.size();
    }

    int NeuralNetwork::get_neuron_bias_index(size_t neuron_idx) {
        return this->neuron_bias_indices.at(neuron_idx);
    }

    size_t NeuralNetwork::get_n_neurons() {
        return this->neurons.size();
    }

    void NeuralNetwork::specify_input_neurons(std::vector<size_t>& input_neurons_indices) {
        this->input_neuron_indices = input_neurons_indices;
    }

    void NeuralNetwork::specify_output_neurons(std::vector<size_t>& output_neurons_indices) {
        this->output_neuron_indices = output_neurons_indices;
    }

    void NeuralNetwork::write_weights() {
        if( lib4neuro::mpi_rank > 0 ){
            return;
        }

        std::cout << "Connection weights: ";
        if (!this->connection_weights.empty()) {
            for (size_t i = 0; i < this->connection_weights.size() - 1; ++i) {
                std::cout << this->connection_weights.at(i) << ", ";
            }

            std::cout << this->connection_weights.at(this->connection_weights.size() - 1) << std::endl;
        }
    }

    void NeuralNetwork::write_weights(std::string file_path) {
        if( lib4neuro::mpi_rank > 0 ){
            return;
        }

        std::ofstream ofs(file_path);

        if (!ofs.is_open()) {
            THROW_RUNTIME_ERROR("File " + file_path + " can not be opened!");
        }

        ofs << "Connection weights: ";

        if (!this->connection_weights.empty()) {
            for (size_t i = 0; i < this->connection_weights.size() - 1; ++i) {
                ofs << this->connection_weights.at(i) << ", ";
            }

            ofs << this->connection_weights.at(this->connection_weights.size() - 1) << std::endl;
        }
    }

    void NeuralNetwork::write_weights(std::ofstream* file_path) {
        if( lib4neuro::mpi_rank > 0 ){
            return;
        }

        if (!this->connection_weights.empty()) {
            for (size_t i = 0; i < this->connection_weights.size() - 1; ++i) {
                *file_path << this->connection_weights.at(i) << ", ";
            }

            *file_path << this->connection_weights.at(this->connection_weights.size() - 1) << std::endl;
        }
    }

    void NeuralNetwork::write_biases() {
        if( lib4neuro::mpi_rank > 0 ){
            return;
        }

        std::cout << "Network biases: ";

        if (!this->neuron_biases.empty()) {
            for (unsigned int i = 0; i < this->neuron_biases.size() - 1; i++) {
                std::cout << this->neuron_biases.at(i) << ", ";
            }

            std::cout << this->neuron_biases.at(this->neuron_biases.size() - 1) << std::endl;
        }
    }

    void NeuralNetwork::write_biases(std::string file_path) {
        if( lib4neuro::mpi_rank > 0 ){
            return;
        }

        std::ofstream ofs(file_path);

        if (!ofs.is_open()) {
            THROW_RUNTIME_ERROR("File " + file_path + " can not be opened!");
        }

        ofs << "Network biases: ";

        if (!this->neuron_biases.empty()) {
            for (unsigned int i = 0; i < this->neuron_biases.size() - 1; i++) {
                ofs << this->neuron_biases.at(i) << ", ";
            }

            ofs << this->neuron_biases.at(this->neuron_biases.size() - 1) << std::endl;
        }
    }

    void NeuralNetwork::write_biases(std::ofstream* file_path) {
        if( lib4neuro::mpi_rank > 0 ){
            return;
        }

        if (!this->neuron_biases.empty()) {
            for (unsigned int i = 0; i < this->neuron_biases.size() - 1; i++) {
                *file_path << this->neuron_biases.at(i) << ", ";
            }

            *file_path << this->neuron_biases.at(this->neuron_biases.size() - 1) << std::endl;
        }
    }

    void NeuralNetwork::write_stats() {
        if( lib4neuro::mpi_rank > 0 ){
            return;
        }

        ::std::cout << std::flush
                    << "Number of neurons: " << this->neurons.size() << ::std::endl
                    << "Number of connections: " << this->connection_list.size() << ::std::endl
                    << "Number of active weights: " << this->connection_weights.size() << ::std::endl
                    << "Number of active biases: " << this->neuron_biases.size() << ::std::endl;
        if (this->normalization_strategy) {
            ::std::cout << std::flush
                        << "Normalization strategy maximum value: "
                        << this->normalization_strategy->get_max_value() << std::endl
                        << "Normalization strategy minimum value: "
                        << this->normalization_strategy->get_min_value()
                        << std::endl;
        }
    }

    void NeuralNetwork::write_stats(std::string file_path) {
        if( lib4neuro::mpi_rank > 0 ){
            return;
        }

        std::ofstream ofs(file_path);

        if (!ofs.is_open()) {
            THROW_RUNTIME_ERROR("File " + file_path + " can not be opened!");
        }

        ofs << "Number of neurons: " << this->neurons.size() << ::std::endl
            << "Number of connections: " << this->connection_list.size() << ::std::endl
            << "Number of active weights: " << this->connection_weights.size() << ::std::endl
            << "Number of active biases: " << this->neuron_biases.size() << ::std::endl;
        if (this->normalization_strategy) {
            ofs << "Normalization strategy maximum value: "
                << this->normalization_strategy->get_max_value() << std::endl
                << "Normalization strategy minimum value: "
                << this->normalization_strategy->get_min_value()
                << std::endl;
        }

        ofs.close();
    }

    void NeuralNetwork::write_stats(std::ofstream* file_path) {
        if( lib4neuro::mpi_rank > 0 ){
            return;
        }

        *file_path << "Number of neurons: " << this->neurons.size() << ::std::endl
                   << "Number of connections: " << this->connection_list.size() << ::std::endl
                   << "Number of active weights: " << this->connection_weights.size() << ::std::endl
                   << "Number of active biases: " << this->neuron_biases.size() << ::std::endl;
        if (this->normalization_strategy) {
            *file_path << "Normalization strategy maximum value: "
                       << this->normalization_strategy->get_max_value() << std::endl
                       << "Normalization strategy minimum value: "
                       << this->normalization_strategy->get_min_value()
                       << std::endl;
        }
    }

    std::vector<double>* NeuralNetwork::get_parameter_ptr_biases() {
        return &this->neuron_biases;
    }

    std::vector<double>* NeuralNetwork::get_parameter_ptr_weights() {
        return &this->connection_weights;
    }

    size_t NeuralNetwork::add_new_connection_to_list(std::shared_ptr<ConnectionFunctionGeneral> con) {
        this->connection_list.push_back(con);
        return this->connection_list.size() - 1;
    }

    void NeuralNetwork::add_inward_connection(size_t s,
                                              size_t t,
                                              size_t con_idx) {
        if (!this->inward_adjacency.at(s)) {
            this->inward_adjacency.at(s) = std::make_shared<std::vector<std::pair<size_t, size_t>>>(::std::vector<std::pair<size_t, size_t>>(0));
        }
        this->inward_adjacency.at(s)->push_back(std::pair<size_t, size_t>(t,
                                                                          con_idx));
    }

    void NeuralNetwork::add_outward_connection(size_t s,
                                               size_t t,
                                               size_t con_idx) {
        if (!this->outward_adjacency.at(s)) {
            this->outward_adjacency.at(s) = std::make_shared<std::vector<std::pair<size_t, size_t>>>(::std::vector<std::pair<size_t, size_t>>(0));
        }
        this->outward_adjacency.at(s)->push_back(std::pair<size_t, size_t>(t,
                                                                           con_idx));
    }

    void NeuralNetwork::analyze_layer_structure() {
        if (this->layers_analyzed) {
            //nothing to do
            return;
        }
        this->neuron_potentials.resize(this->get_n_neurons());
        this->neuron_layers_feedforward.clear();
        auto n = this->neurons.size();
        ::std::vector<size_t> inward_saturation(n);
        ::std::vector<size_t> outward_saturation(n);
        ::std::fill(inward_saturation.begin(),
                    inward_saturation.end(),
                    0);
        ::std::fill(outward_saturation.begin(),
                    outward_saturation.end(),
                    0);
        for (size_t i = 0; i < n; ++i) {
            if (this->inward_adjacency.at(i)) {
                inward_saturation[i] = this->inward_adjacency.at(i)->size();
            }

            if (this->outward_adjacency.at(i)) {
                outward_saturation[i] = this->outward_adjacency.at(i)->size();
            }
        }

        ::std::vector<size_t> active_eval_set(2 * n);
        size_t                active_set_size[2];
        /* feedforward analysis */
        active_set_size[0] = 0;
        active_set_size[1] = 0;
        size_t idx1 = 0, idx2 = 1;

        active_set_size[0] = this->get_n_inputs();
        size_t i = 0;
        for (i = 0; i < this->get_n_inputs(); ++i) {
            active_eval_set[i] = this->input_neuron_indices.at(i);
        }

        size_t active_ni;
        while (active_set_size[idx1] > 0) {

            /* we add the current active set as the new outward layer */
            std::shared_ptr<::std::vector<size_t>> new_feedforward_layer = std::make_shared<::std::vector<size_t>>(::std::vector<size_t>(active_set_size[idx1]));
            this->neuron_layers_feedforward.push_back(new_feedforward_layer);

            //we iterate through the active neurons and propagate the signal
            for (i = 0; i < active_set_size[idx1]; ++i) {
                active_ni = active_eval_set[i + n * idx1];
                new_feedforward_layer->at(i) = active_ni;
                if (!this->outward_adjacency.at(active_ni)) {
                    continue;
                }

                for (auto ni: *(this->outward_adjacency.at(active_ni))) {
                    inward_saturation[ni.first]--;

                    if (inward_saturation[ni.first] == 0) {
                        active_eval_set[active_set_size[idx2] + n * idx2] = ni.first;
                        active_set_size[idx2]++;
                    }
                }
            }

            /* prepare the active sets for the next layer */
            idx1 = (idx1 + 1) % 2;
            idx2 = (idx2 + 1) % 2;
            active_set_size[idx2] = 0;
        }

        this->layers_analyzed = true;
    }
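
    /* Note (added for clarity): the loop above is a Kahn-style topological sort.
     * Neurons with no unresolved inputs (inward_saturation == 0) form the first
     * layer; every processed edge decrements its target's counter, and a neuron
     * enters the next layer once the counter reaches zero. The two halves of
     * active_eval_set act as double buffers for the current and the next layer. */
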
    void NeuralNetwork::init_from_file(const std::string &filepath) {

        for( int i = 0; i < lib4neuro::mpi_nranks; ++i ){
            if( i == lib4neuro::mpi_rank ){
                ::std::ifstream ifs(filepath);
                if (ifs.is_open()) {
                    try {
                        boost::archive::text_iarchive ia(ifs);
                        ia >> *this;
                    }
                    catch (boost::archive::archive_exception& e) {
                        THROW_RUNTIME_ERROR(
                                "Serialized archive error: '" + std::string(e.what()) + "'! Please, check if your "
                                "file is really the serialized NeuralNetwork.");
                    }
                    ifs.close();
                } else {
                    THROW_RUNTIME_ERROR("File '" + filepath + "' couldn't be open!");
                }
            }

            MPI_Barrier(lib4neuro::mpi_active_comm);
        }
    }

    void NeuralNetwork::save_text(std::string filepath) {
        if( lib4neuro::mpi_rank > 0 ){
            return;
        }

        ::std::ofstream ofs(filepath);
        {
            boost::archive::text_oarchive oa(ofs);
            oa << *this;
            ofs.close();
        }
    }
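
    /* Usage sketch (added for illustration): round-tripping a network through
     * the Boost text archive used above; the file name is arbitrary.
     *
     *     net.save_text("net.4n");                     // rank 0 writes the archive
     *     lib4neuro::NeuralNetwork restored("net.4n"); // ranks read it back in turn
     */
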
    NormalizationStrategy* NeuralNetwork::get_normalization_strategy_instance() {
        return this->normalization_strategy;
    }

    void NeuralNetwork::set_normalization_strategy_instance(NormalizationStrategy* ns) {
        if (!ns) {
            THROW_RUNTIME_ERROR("Argument 'ns' is not initialized!");
        }

        this->normalization_strategy = ns;
    }

    FullyConnectedFFN::FullyConnectedFFN(std::vector<unsigned int>* neuron_numbers,
                                         NEURON_TYPE hidden_layer_neuron_type,
                                         std::ofstream* ofs) : NeuralNetwork() {
        std::vector<NEURON_TYPE> tmp;
        for (size_t i = 0; i < neuron_numbers->size(); i++) {
            tmp.emplace_back(hidden_layer_neuron_type);
        }

        this->init(neuron_numbers,
                   &tmp,
                   ofs);
    }

    FullyConnectedFFN::FullyConnectedFFN(std::vector<unsigned int>* neuron_numbers,
                                         std::vector<lib4neuro::NEURON_TYPE>* hidden_layer_neuron_types,
                                         std::ofstream* ofs) : NeuralNetwork() {
        this->init(neuron_numbers,
                   hidden_layer_neuron_types,
                   ofs);
    }

    void FullyConnectedFFN::init(std::vector<unsigned int>* neuron_numbers,
                                 std::vector<NEURON_TYPE>* hidden_layer_neuron_types,
                                 std::ofstream* ofs) {
        if (neuron_numbers->size() < 2) {
            THROW_INVALID_ARGUMENT_ERROR("Parameter 'neuron_numbers' specifying numbers of neurons in network's layers "
                                         "doesn't specify input and output layers, which are compulsory!");
        }

        this->delete_weights  = true;
        this->delete_biases   = true;

        unsigned int inp_dim = neuron_numbers->at(0);  //!< Network input dimension
        unsigned int out_dim = neuron_numbers->back(); //!< Network output dimension

        COUT_DEBUG("Fully connected feed-forward network being constructed:" << std::endl);
        COUT_DEBUG("# of inputs: " << inp_dim << std::endl);
        COUT_DEBUG("# of outputs: " << out_dim << std::endl);
        WRITE_TO_OFS_DEBUG(ofs,
                           "Fully connected feed-forward network being constructed:" << std::endl
                                                                                     << "# of inputs: " << inp_dim
                                                                                     << std::endl
                                                                                     << "# of outputs: " << out_dim
                                                                                     << std::endl);
        std::vector<size_t> input_layer_neuron_indices;
        std::vector<size_t> previous_layer_neuron_indices;
        std::vector<size_t> current_layer_neuron_indices;

        /* Creation of INPUT layer neurons */
        current_layer_neuron_indices.reserve(inp_dim);
        input_layer_neuron_indices.reserve(inp_dim);
        for (unsigned int i = 0; i < inp_dim; i++) {
            std::shared_ptr<Neuron> new_neuron;
            new_neuron.reset(new NeuronLinear());
            size_t neuron_id = this->add_neuron(new_neuron,
                                                BIAS_TYPE::NO_BIAS);
            current_layer_neuron_indices.emplace_back(neuron_id);
        }
        input_layer_neuron_indices = current_layer_neuron_indices;

        /* Creation of HIDDEN layers */
        for (unsigned int i = 1; i <= neuron_numbers->size() - 2; i++) {
            COUT_DEBUG("Hidden layer #" << i << ": " << neuron_numbers->at(i) << " neurons" << std::endl);
            WRITE_TO_OFS_DEBUG(ofs,
                               "Hidden layer #" << i << ": " << neuron_numbers->at(i) << " neurons" << std::endl);
            previous_layer_neuron_indices.reserve(neuron_numbers->at(i - 1));
            previous_layer_neuron_indices = current_layer_neuron_indices;
            current_layer_neuron_indices.clear();
            current_layer_neuron_indices.reserve(neuron_numbers->at(i));

            /* Creation of one single hidden layer */
            for (unsigned int j = 0; j < neuron_numbers->at(i); j++) {
                size_t neuron_id;

                /* Create new hidden neuron */
                switch (hidden_layer_neuron_types->at(i - 1)) {
                    case NEURON_TYPE::BINARY: {
                        std::shared_ptr<Neuron> new_neuron;
                        new_neuron.reset(new NeuronBinary());
                        neuron_id = this->add_neuron(new_neuron,
                                                     BIAS_TYPE::NEXT_BIAS);
                        COUT_DEBUG("Added BINARY neuron." << std::endl);
                        WRITE_TO_OFS_DEBUG(ofs,
                                           "Added BINARY neuron." << std::endl);
                        break;
                    }

                    case NEURON_TYPE::CONSTANT: {
                        THROW_INVALID_ARGUMENT_ERROR("Constant neurons can't be used in fully connected feed-forward networks!");
                    }

                    case NEURON_TYPE::LINEAR: {
                        std::shared_ptr<Neuron> new_neuron;
                        new_neuron.reset(new NeuronLinear());
                        neuron_id = this->add_neuron(new_neuron,
                                                     BIAS_TYPE::NEXT_BIAS);
                        COUT_DEBUG("Added LINEAR neuron." << std::endl);
                        WRITE_TO_OFS_DEBUG(ofs,
                                           "Added LINEAR neuron." << std::endl);
                        break;
                    }

                    case NEURON_TYPE::LOGISTIC: {
                        std::shared_ptr<Neuron> new_neuron;
                        new_neuron.reset(new NeuronLogistic());
                        neuron_id = this->add_neuron(new_neuron,
                                                     BIAS_TYPE::NEXT_BIAS);
                        COUT_DEBUG("Added LOGISTIC neuron." << std::endl);
                        WRITE_TO_OFS_DEBUG(ofs,
                                           "Added LOGISTIC neuron." << std::endl);
                        break;
                    }
                }

                current_layer_neuron_indices.emplace_back(neuron_id);

                /* Connect new neuron with all neurons from the previous layer */
                for (auto ind : previous_layer_neuron_indices) {
                    this->add_connection_simple(ind,
                                                neuron_id,
                                                lib4neuro::SIMPLE_CONNECTION_TYPE::NEXT_WEIGHT);
                }
            }
        }

        previous_layer_neuron_indices.reserve(neuron_numbers->back() - 1);
        previous_layer_neuron_indices = current_layer_neuron_indices;
        current_layer_neuron_indices.clear();
        current_layer_neuron_indices.reserve(out_dim);

        /* Creation of OUTPUT layer neurons */
        for (unsigned int i = 0; i < out_dim; i++) {
            std::shared_ptr<Neuron> new_neuron;
            new_neuron.reset(new NeuronLinear());
            size_t neuron_id = this->add_neuron(new_neuron,
                                                BIAS_TYPE::NO_BIAS);
            current_layer_neuron_indices.emplace_back(neuron_id);

            /* Connect new neuron with all neurons from the previous layer */
            for (auto ind : previous_layer_neuron_indices) {
                this->add_connection_simple(ind,
                                            neuron_id,
                                            lib4neuro::SIMPLE_CONNECTION_TYPE::NEXT_WEIGHT);
            }
        }

        /* Init variables containing indices of INPUT and OUTPUT neurons */
        this->input_neuron_indices  = input_layer_neuron_indices;
        this->output_neuron_indices = current_layer_neuron_indices;
        this->analyze_layer_structure();
        this->randomize_parameters();
    }
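
    /* Usage sketch (added for illustration): a fully connected 2-4-1 network
     * with logistic hidden units; the layer sizes are arbitrary examples.
     *
     *     std::vector<unsigned int> layers = {2, 4, 1};
     *     lib4neuro::FullyConnectedFFN net(&layers, lib4neuro::NEURON_TYPE::LOGISTIC, nullptr);
     *     // init() has already randomized the parameters, so the network can be
     *     // evaluated via eval_single() or trained right away
     */
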
    void NeuralNetwork::get_jacobian(std::vector<std::vector<double>>& jacobian,
                                     std::pair<std::vector<double>, std::vector<double>>& data,
                                     std::vector<double>& error) {

        std::vector<double> fv(this->get_n_outputs());

        jacobian.resize(this->get_n_outputs());
        error.resize(this->get_n_outputs());
        for (size_t i = 0; i < this->get_n_outputs(); ++i) {
            jacobian[i].resize(this->get_n_weights() + this->get_n_biases());
            std::fill(jacobian[i].begin(),
                      jacobian[i].end(),
                      0);
        }

        this->eval_single(data.first,
                          fv);

        std::vector<double> error_partial(this->get_n_outputs());
        std::fill(error_partial.begin(),
                  error_partial.end(),
                  0.0);

        for (size_t i = 0; i < this->get_n_outputs(); ++i) {
            error_partial[i] = 1;
            this->add_to_gradient_single(data.first,
                                         error_partial,
                                         1.0,
                                         jacobian[i]);
            error[i]         = data.second[i] - fv[i];
            error_partial[i] = 0;
        }
    }
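
    /* Note (added for clarity): get_jacobian() runs one backpropagation pass per
     * network output by setting the matching entry of error_partial to 1, so that
     * jacobian[i][j] = d f_i / d p_j with p the flattened [weights | biases]
     * parameter vector, while error[i] = target_i - f_i(x). */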

    std::pair<double, double> NeuralNetwork::get_min_max_weight() {
        return std::make_pair(*std::min_element(this->connection_weights.begin(), this->connection_weights.end()),
                              *std::max_element(this->connection_weights.begin(), this->connection_weights.end()));
    }

    size_t NeuralNetwork::get_input_neurons_number() {
        return this->input_neuron_indices.size();
    }

    size_t NeuralNetwork::get_output_neurons_number() {
        return this->output_neuron_indices.size();
    }
}