/* NeuralNetwork.cpp */
    std::vector<double> *NeuralNetwork::get_parameter_ptr_biases() {
        return this->neuron_biases;
    }
    std::vector<double> *NeuralNetwork::get_parameter_ptr_weights() {
        return this->connection_weights;
    }
    size_t NeuralNetwork::add_new_connection_to_list(ConnectionFunctionGeneral *con) {
        this->connection_list->push_back(con);
        return this->connection_list->size() - 1;
    }
    void NeuralNetwork::add_inward_connection(size_t s, size_t t, size_t con_idx) {
        if (!this->inward_adjacency->at(s)) {
            this->inward_adjacency->at(s) = new ::std::vector<std::pair<size_t, size_t>>(0);
        }
        this->inward_adjacency->at(s)->push_back(std::pair<size_t, size_t>(t, con_idx));
    }
    void NeuralNetwork::add_outward_connection(size_t s, size_t t, size_t con_idx) {
        if (!this->outward_adjacency->at(s)) {
            this->outward_adjacency->at(s) = new ::std::vector<std::pair<size_t, size_t>>(0);
        }
        this->outward_adjacency->at(s)->push_back(std::pair<size_t, size_t>(t, con_idx));
    }
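    /*
     * Note on the adjacency representation used by the two methods above:
     * inward_adjacency->at(s) and outward_adjacency->at(s) hold pairs of
     * (neighbor neuron index, index into this->connection_list). A sketch of
     * how an edge s -> t is presumably registered by add_connection_simple
     * (the exact call site lives elsewhere in this file):
     *
     *     size_t con_idx = this->add_new_connection_to_list(con);
     *     this->add_outward_connection(s, t, con_idx); // s feeds t
     *     this->add_inward_connection(t, s, con_idx);  // t is fed by s
     */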
    void NeuralNetwork::analyze_layer_structure() {
        /* buffer preparation */
        this->neuron_potentials->resize(this->get_n_neurons());
        /* space allocation */
        if (this->neuron_layers_feedforward) {
            for (auto e: *this->neuron_layers_feedforward) {
                delete e;
                e = nullptr;
            }
            delete this->neuron_layers_feedforward;
            this->neuron_layers_feedforward = nullptr;
        }
//    if(this->neuron_layers_feedbackward){
//        for(auto e: *this->neuron_layers_feedbackward){
//            delete e;
//            e = nullptr;
//        }
//        delete this->neuron_layers_feedbackward;
//        this->neuron_layers_feedbackward = nullptr;
//    }
        this->neuron_layers_feedforward = new ::std::vector<std::vector<size_t> *>(0);
//    this->neuron_layers_feedbackward = new ::std::vector<std::vector<size_t>*>(0);
        size_t n = this->get_n_neurons();

        /* helper counters for the topological layer analysis */
        ::std::vector<size_t> inward_saturation(n);
        ::std::vector<size_t> outward_saturation(n);
        ::std::fill(inward_saturation.begin(), inward_saturation.end(), 0);
        ::std::fill(outward_saturation.begin(), outward_saturation.end(), 0);
        for (unsigned int i = 0; i < n; ++i) {
            if (this->inward_adjacency->at(i)) {
                inward_saturation[i] = this->inward_adjacency->at(i)->size();
            }
            if (this->outward_adjacency->at(i)) {
                outward_saturation[i] = this->outward_adjacency->at(i)->size();
            }
        }
        ::std::vector<size_t> active_eval_set(2 * n);
        size_t active_set_size[2];

        /* feedforward analysis */
        active_set_size[0] = 0;
        active_set_size[1] = 0;
        size_t idx1 = 0, idx2 = 1;

        active_set_size[0] = this->get_n_inputs();
        size_t i = 0;
        for (i = 0; i < this->get_n_inputs(); ++i) {
            active_eval_set[i] = this->input_neuron_indices->at(i);
        }

        size_t active_ni;
        while (active_set_size[idx1] > 0) {

            /* we add the current active set as the new outward layer */
            ::std::vector<size_t> *new_feedforward_layer = new ::std::vector<size_t>(active_set_size[idx1]);
            this->neuron_layers_feedforward->push_back(new_feedforward_layer);

            //we iterate through the active neurons and propagate the signal
            for (i = 0; i < active_set_size[idx1]; ++i) {
                active_ni = active_eval_set[i + n * idx1];
                new_feedforward_layer->at(i) = active_ni;
                if (!this->outward_adjacency->at(active_ni)) {
                    continue;
                }

                for (auto ni: *(this->outward_adjacency->at(active_ni))) {
                    inward_saturation[ni.first]--;
                    if (inward_saturation[ni.first] == 0) {
                        active_eval_set[active_set_size[idx2] + n * idx2] = ni.first;
                        active_set_size[idx2]++;
                    }
                }
            }

            /* swap the two halves of the double buffer for the next layer */
            idx1 = idx2;
            idx2 = (idx1 + 1) % 2;

            active_set_size[idx2] = 0;
        }
//    /* feed backward analysis */
//    active_set_size[0] = 0;
//    active_set_size[1] = 0;
//
//    idx1 = 0;
//    idx2 = 1;
//
//    active_set_size[0] = this->get_n_outputs();
//    for(i = 0; i < this->get_n_outputs(); ++i){
//        active_eval_set[i] = this->output_neuron_indices->at(i);
//    }
//
//    while(active_set_size[idx1] > 0){
//
//        /* we add the current active set as the new outward layer */
//        ::std::vector<size_t> *new_feedbackward_layer = new ::std::vector<size_t>(active_set_size[idx1]);
//        this->neuron_layers_feedbackward->push_back( new_feedbackward_layer );
//
//        //we iterate through the active neurons and propagate the signal backward
//        for(i = 0; i < active_set_size[idx1]; ++i){
//            active_ni = active_eval_set[i + n * idx1];
//            new_feedbackward_layer->at( i ) = active_ni;
//
//            if(!this->inward_adjacency->at(active_ni)){
//                continue;
//            }
//
//            for(auto ni: *(this->inward_adjacency->at(active_ni))){
//                outward_saturation[ni.first]--;
//
//                if(outward_saturation[ni.first] == 0){
//                    active_eval_set[active_set_size[idx2] + n * idx2] = ni.first;
//                    active_set_size[idx2]++;
//                }
//            }
//        }
//
//        idx1 = idx2;
//        idx2 = (idx1 + 1) % 2;
//
//        active_set_size[idx2] = 0;
//    }

        this->layers_analyzed = true;
    }
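    /*
     * Illustrative walk-through of the feedforward analysis above (a sketch,
     * not part of the library): take the graph with edges 0 -> 2, 1 -> 2,
     * 1 -> 3 and 2 -> 3, where neurons 0 and 1 are the inputs.
     * inward_saturation starts as {0, 0, 2, 2}. The first active set {0, 1}
     * becomes layer one; propagating it drops neuron 2's saturation to zero,
     * so layer two is {2}; that in turn releases neuron 3, giving the final
     * layering {0, 1}, {2}, {3}. The two halves of active_eval_set serve as
     * double buffers selected by idx1/idx2, which is why it is sized 2 * n.
     */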
    void NeuralNetwork::save_text(std::string filepath) {
        ::std::ofstream ofs(filepath);
        {
            boost::archive::text_oarchive oa(ofs);
            oa << *this;
        }
        /* close the stream only after the archive has been destroyed and flushed */
        ofs.close();
    }
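    /*
     * Usage sketch for the serialization above (illustrative; the matching
     * text_iarchive load path is an assumption, as this section only shows
     * the saving side):
     *
     *     net.save_text("net.txt");
     *     // ::std::ifstream ifs("net.txt");
     *     // boost::archive::text_iarchive ia(ifs);
     *     // ia >> net;
     */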
    NormalizationStrategy* NeuralNetwork::get_normalization_strategy_instance() {
        return this->normalization_strategy;
    }

    void NeuralNetwork::set_normalization_strategy_instance(NormalizationStrategy *ns) {
        if(!ns) {
            THROW_RUNTIME_ERROR("Argument 'ns' is not initialized!");
        }

        this->normalization_strategy = ns;
    }
    FullyConnectedFFN::FullyConnectedFFN(std::vector<unsigned int>* neuron_numbers,
                                         NEURON_TYPE hidden_layer_neuron_type,
                                         std::ofstream* ofs) : NeuralNetwork() {
        std::vector<NEURON_TYPE> tmp;

        for(size_t i = 0; i < neuron_numbers->size(); i++) {
            tmp.emplace_back(hidden_layer_neuron_type);
        }

        this->init(neuron_numbers, &tmp, ofs);
    }

    FullyConnectedFFN::FullyConnectedFFN(std::vector<unsigned int>* neuron_numbers,
                                         std::vector<lib4neuro::NEURON_TYPE>* hidden_layer_neuron_types,
                                         std::ofstream* ofs) : NeuralNetwork() {
        this->init(neuron_numbers, hidden_layer_neuron_types, ofs);
    }
    void FullyConnectedFFN::init(std::vector<unsigned int>* neuron_numbers,
                                 std::vector<NEURON_TYPE>* hidden_layer_neuron_types,
                                 std::ofstream* ofs) {
        if(neuron_numbers->size() < 2) {
            THROW_INVALID_ARGUMENT_ERROR("Parameter 'neuron_numbers' specifying numbers of neurons in network's layers "
                                         "doesn't specify input and output layers, which are compulsory!");
        }
        this->neurons = new ::std::vector<Neuron *>(0);
        this->neuron_biases = new ::std::vector<double>(0);
        this->neuron_potentials = new ::std::vector<double>(0);
        this->neuron_bias_indices = new ::std::vector<int>(0);

        this->connection_weights = new ::std::vector<double>(0);
        this->connection_list = new ::std::vector<ConnectionFunctionGeneral *>(0);
        this->inward_adjacency = new ::std::vector<std::vector<std::pair<size_t, size_t>> *>(0);
        this->outward_adjacency = new ::std::vector<std::vector<std::pair<size_t, size_t>> *>(0);

        this->neuron_layers_feedforward = new ::std::vector<std::vector<size_t> *>(0);
        this->neuron_layers_feedbackward = new ::std::vector<std::vector<size_t> *>(0);

        this->input_neuron_indices = new ::std::vector<size_t>(0);
        this->output_neuron_indices = new ::std::vector<size_t>(0);

        this->delete_weights = true;
        this->delete_biases = true;
        this->layers_analyzed = false;

        unsigned int inp_dim = neuron_numbers->at(0);  //!< Network input dimension
        unsigned int out_dim = neuron_numbers->back(); //!< Network output dimension

        COUT_DEBUG("Fully connected feed-forward network being constructed:" << std::endl);
        COUT_DEBUG("# of inputs: " << inp_dim << std::endl);
        COUT_DEBUG("# of outputs: " << out_dim << std::endl);
        WRITE_TO_OFS_DEBUG(ofs, "Fully connected feed-forward network being constructed:" << std::endl
                                << "# of inputs: " << inp_dim << std::endl
                                << "# of outputs: " << out_dim << std::endl);

        std::vector<size_t> input_layer_neuron_indices;
        std::vector<size_t> previous_layer_neuron_indices;
        std::vector<size_t> current_layer_neuron_indices;

        /* Creation of INPUT layer neurons */
        current_layer_neuron_indices.reserve(inp_dim);
        input_layer_neuron_indices.reserve(inp_dim);
        for(unsigned int i = 0; i < inp_dim; i++) {
            size_t neuron_id = this->add_neuron(new NeuronLinear, BIAS_TYPE::NO_BIAS);
            current_layer_neuron_indices.emplace_back(neuron_id);
        }
        input_layer_neuron_indices = current_layer_neuron_indices;

        /* Creation of HIDDEN layers */
        for(unsigned int i = 1; i <= neuron_numbers->size()-2; i++) {
            COUT_DEBUG("Hidden layer #" << i << ": " << neuron_numbers->at(i) << " neurons" << std::endl);
            WRITE_TO_OFS_DEBUG(ofs, "Hidden layer #" << i << ": " << neuron_numbers->at(i) << " neurons" << std::endl);
            previous_layer_neuron_indices.reserve(neuron_numbers->at(i-1));
            previous_layer_neuron_indices = current_layer_neuron_indices;
            current_layer_neuron_indices.clear();
            current_layer_neuron_indices.reserve(neuron_numbers->at(i));

            /* Creation of one single hidden layer */
            for(unsigned int j = 0; j < neuron_numbers->at(i); j++) {
                size_t neuron_id;

                /* Create new hidden neuron */
                switch (hidden_layer_neuron_types->at(i-1)) {
                    case NEURON_TYPE::BINARY: {
                        neuron_id = this->add_neuron(new NeuronBinary, BIAS_TYPE::NEXT_BIAS);
                        COUT_DEBUG("Added BINARY neuron." << std::endl);
                        WRITE_TO_OFS_DEBUG(ofs, "Added BINARY neuron." << std::endl);
                        break;
                    }
                    case NEURON_TYPE::CONSTANT: {
                        THROW_INVALID_ARGUMENT_ERROR("Constant neurons can't be used in fully connected feed-forward networks!");
                    }
                    case NEURON_TYPE::LINEAR: {
                        neuron_id = this->add_neuron(new NeuronLinear, BIAS_TYPE::NEXT_BIAS);
                        COUT_DEBUG("Added LINEAR neuron." << std::endl);
                        WRITE_TO_OFS_DEBUG(ofs, "Added LINEAR neuron." << std::endl);
                        break;
                    }
                    case NEURON_TYPE::LOGISTIC: {
                        neuron_id = this->add_neuron(new NeuronLogistic, BIAS_TYPE::NEXT_BIAS);
                        COUT_DEBUG("Added LOGISTIC neuron." << std::endl);
                        WRITE_TO_OFS_DEBUG(ofs, "Added LOGISTIC neuron." << std::endl);
                        break;
                    }
                }

                current_layer_neuron_indices.emplace_back(neuron_id);

                /* Connect new neuron with all neurons from the previous layer */
                for(auto ind : previous_layer_neuron_indices) {
                    this->add_connection_simple(ind, neuron_id, l4n::SIMPLE_CONNECTION_TYPE::NEXT_WEIGHT);
                }
            }
        }

        previous_layer_neuron_indices.reserve(neuron_numbers->at(neuron_numbers->size()-2));
        previous_layer_neuron_indices = current_layer_neuron_indices;
        current_layer_neuron_indices.clear();
        current_layer_neuron_indices.reserve(out_dim);

        /* Creation of OUTPUT layer neurons */
        for(unsigned int i = 0; i < out_dim; i++) {
            size_t neuron_id = this->add_neuron(new NeuronLinear, BIAS_TYPE::NO_BIAS);
            current_layer_neuron_indices.emplace_back(neuron_id);

            /* Connect the new neuron with all neurons from the previous layer */
            for(auto ind : previous_layer_neuron_indices) {
                this->add_connection_simple(ind, neuron_id, l4n::SIMPLE_CONNECTION_TYPE::NEXT_WEIGHT);
            }
        }

        /* Init variables containing indices of INPUT and OUTPUT neurons
         * (reuse the vectors allocated earlier in this method to avoid leaking them) */
        *this->input_neuron_indices = input_layer_neuron_indices;
        *this->output_neuron_indices = current_layer_neuron_indices;
    }
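    /*
     * Usage sketch (illustrative, not part of the library): building a
     * 2-4-4-1 fully connected network with logistic hidden units. Passing
     * nullptr for the ofstream argument is an assumption based on the
     * WRITE_TO_OFS_DEBUG guard above.
     *
     *     std::vector<unsigned int> layers = {2, 4, 4, 1};
     *     l4n::FullyConnectedFFN net(&layers, l4n::NEURON_TYPE::LOGISTIC, nullptr);
     *
     *     // Per-layer variant: one type for each of the two hidden layers.
     *     std::vector<l4n::NEURON_TYPE> types = {l4n::NEURON_TYPE::LOGISTIC,
     *                                            l4n::NEURON_TYPE::LINEAR};
     *     l4n::FullyConnectedFFN net2(&layers, &types, nullptr);
     */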

    void NeuralNetwork::get_jacobian(std::vector<std::vector<double>> &jacobian, std::pair<std::vector<double>, std::vector<double>> &data, std::vector<double> &error) {

        std::vector<double> fv(this->get_n_outputs());

        jacobian.resize(this->get_n_outputs());
        error.resize(this->get_n_outputs());
        for(size_t i = 0; i < this->get_n_outputs(); ++i){
            jacobian[i].resize(this->get_n_weights() + this->get_n_biases());
            std::fill(jacobian[i].begin(), jacobian[i].end(), 0);
        }

        this->eval_single( data.first, fv );

        std::vector<double> error_partial(this->get_n_outputs());
        std::fill(error_partial.begin(), error_partial.end(), 0.0);

        for( size_t i = 0; i < this->get_n_outputs(); ++i){
            error_partial[i] = 1;
            this->add_to_gradient_single(data.first, error_partial, 1.0, jacobian[i]);
            error[i] = data.second[i] - fv[i];
            error_partial[i] = 0;
        }
    }
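    /*
     * What get_jacobian computes (a sketch of the call, with hypothetical
     * variable names): for a sample (x, y) it fills
     * jacobian[i][j] = d out_i / d param_j by seeding the backward pass with
     * the unit vector e_i via error_partial, one output at a time, while
     * error[i] = y_i - f_i(x) is the residual of the forward pass.
     *
     *     std::vector<std::vector<double>> J;
     *     std::vector<double> err;
     *     std::pair<std::vector<double>, std::vector<double>> sample = {x, y};
     *     net.get_jacobian(J, sample, err);
     *     // J now has n_outputs rows and (n_weights + n_biases) columns
     */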