    /**
     * Implementation of NeuralNetworkSum: a weighted sum of neural networks in which
     * each summand network is scaled by a coefficient expression (an ExprtkWrapper)
     * evaluated on the network input.
     *
     * @author Michal Kravčenko
     * @date 18.7.18 -
     */
    
    
    #include "NeuralNetworkSum.h"
    
    #include "NeuralNetworkSumSerialization.h"
    #include "General/ExprtkWrapperSerialization.h"
    
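    // BOOST_CLASS_EXPORT_IMPLEMENT provides the implementation side of Boost.Serialization's
    // class export for NeuralNetworkSum, so instances can be (de)serialized through a pointer
    // to the base class; the matching BOOST_CLASS_EXPORT_KEY declaration is presumably kept
    // in NeuralNetworkSumSerialization.h.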
    BOOST_CLASS_EXPORT_IMPLEMENT(NeuralNetworkSum);
    
    
    NeuralNetworkSum::NeuralNetworkSum(){
        this->summand = nullptr;
        this->summand_coefficient = nullptr;
    }
    
    NeuralNetworkSum::~NeuralNetworkSum() {
        // the vector of summand networks is deleted, but the networks themselves are not
        // owned by this object and are left alive; the coefficient wrappers are owned
        if( this->summand ){
            delete this->summand;
            this->summand = nullptr;
        }

        if( this->summand_coefficient ){
            for(auto el: *this->summand_coefficient){
                delete el;
            }

            delete this->summand_coefficient;
            this->summand_coefficient = nullptr;
        }
    }

    void NeuralNetworkSum::add_network( NeuralNetwork *net, std::string expression_string ) {

        if(!this->summand){
            this->summand = new std::vector<NeuralNetwork*>(0);
        }
        this->summand->push_back( net );

        if(!this->summand_coefficient){
            this->summand_coefficient = new std::vector<ExprtkWrapper*>(0);
        }
        this->summand_coefficient->push_back( new ExprtkWrapper( expression_string ) );
    }
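
    /*
     * Usage sketch (illustrative only, not part of the class): how a weighted sum of
     * networks might be assembled and evaluated. The NeuralNetwork instances net_a and
     * net_b are hypothetical and constructed elsewhere; the exact variable names accepted
     * inside the coefficient expressions depend on ExprtkWrapper and are assumed here.
     *
     *   NeuralNetworkSum sum;
     *   sum.add_network( &net_a, "2.0" );      // constant coefficient
     *   sum.add_network( &net_b, "sin(x)" );   // input-dependent coefficient (assumed syntax)
     *
     *   std::vector<double> input  = { 1.0 };
     *   std::vector<double> output( net_a.get_n_outputs(), 0.0 );
     *   sum.eval_single( input, output, nullptr );
     *   // output[j] == 2.0 * net_a(input)[j] + sin(x) * net_b(input)[j]
     */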
    
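    /*
     * eval_single() computes, for every output component j,
     *
     *     output[j] = sum_i  alpha_i(input) * net_i(input)[j],
     *
     * where alpha_i is the i-th coefficient expression and net_i the i-th summand
     * network; a null network entry contributes alpha_i(input) alone, i.e. it acts
     * as an additive offset term.
     */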
    void NeuralNetworkSum::eval_single(std::vector<double> &input, std::vector<double> &output, std::vector<double> *custom_weights_and_biases) {
    
        std::vector<double> mem_output(output.size());
        std::fill(output.begin(), output.end(), 0.0);
    
    
        for(size_t ni = 0; ni < this->summand->size(); ++ni){
    
            NeuralNetwork *SUM = this->summand->at(ni);

            if( SUM ){
                SUM->eval_single(input, mem_output, custom_weights_and_biases);
    
                double alpha = this->summand_coefficient->at(ni)->eval(input);
    
                for(size_t j = 0; j < output.size(); ++j){
                    output[j] += mem_output[j] * alpha;
                }
            }
            else{
                //TODO assume the result can be a vector of doubles
    
                double alpha = this->summand_coefficient->at(ni)->eval(input);
    
    
                for(size_t j = 0; j < output.size(); ++j){
                    output[j] += alpha;
                }
    
            }
        }
    
    }
    
    size_t NeuralNetworkSum::get_n_weights(){
    
        //TODO insufficient solution, assumes the networks share weights
    
        if(this->summand){
            return this->summand->at(0)->get_n_weights();
        }
    
    
        return 0;
    }
    
    size_t NeuralNetworkSum::get_n_biases(){
        //TODO insufficient solution, assumes the networks share weights and biases
        if(this->summand){
            return this->summand->at(0)->get_n_biases();
        }
    
        return 0;
    }
    
    size_t NeuralNetworkSum::get_n_inputs() {
        //TODO insufficient solution, assumes all summand networks have the same number of inputs
        if(this->summand){
            return this->summand->at(0)->get_n_inputs();
        }
    
        return 0;
    }
    
    size_t NeuralNetworkSum::get_n_neurons() {
        //TODO insufficient solution, assumes all summand networks have the same number of neurons
        if(this->summand){
            return this->summand->at(0)->get_n_neurons();
        }
    
        return 0;
    }
    
    size_t NeuralNetworkSum::get_n_outputs() {
        //TODO insufficient solution, assumes all summand networks have the same number of outputs
        if(this->summand){
            return this->summand->at(0)->get_n_outputs();
        }

        return 0;
    }