ACSFNeuralNetwork.cpp
    //
    // Created by martin on 19.08.19.
    //
    
    #include "../settings.h"
    #include "ACSFNeuralNetwork.h"
    
    lib4neuro::ACSFNeuralNetwork::ACSFNeuralNetwork(std::unordered_map<ELEMENT_SYMBOL, Element*>& elements,
                                                    std::vector<ELEMENT_SYMBOL>& elements_list,
                                                    bool with_charge,
                                                    std::unordered_map<ELEMENT_SYMBOL, std::vector<unsigned int>> n_hidden_neurons,
                                                    std::unordered_map<ELEMENT_SYMBOL, std::vector<NEURON_TYPE>> type_hidden_neurons) {
        /* Construct the neural network */
        std::vector<size_t> inputs;
    
        std::unordered_map<ELEMENT_SYMBOL, size_t> subnet_neuron_shifts;
        std::unordered_map<ELEMENT_SYMBOL, size_t> subnet_connection_shifts;
        std::unordered_map<ELEMENT_SYMBOL, bool> subnet_constructed;
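        /* These shift maps remember where the biases and connection weights of an
         * element's sub-net start in the parameter vectors, so that every further
         * atom of the same element reuses the same parameters
         * (EXISTING_BIAS / EXISTING_WEIGHT below). */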
    
        size_t last_neuron_bias_idx = 0;
        size_t last_connection_weight_idx = 0;

        std::shared_ptr<Neuron> output_neuron = std::make_shared<NeuronLinear>();
        size_t last_neuron_idx = this->add_neuron(output_neuron, BIAS_TYPE::NO_BIAS);
        std::vector<size_t> outputs = {last_neuron_idx};

        for(size_t i = 0; i < elements_list.size(); i++) {
            std::vector<size_t> previous_layer;
            std::vector<size_t> new_layer;

            size_t first_input_neuron_index = last_neuron_idx + 1;

            /* Create input neurons for sub-net */
            std::shared_ptr<NeuronLinear> inp_n;
            for(size_t j = 0; j < elements[elements_list.at(i)]->getSymmetryFunctions().size(); j++) {
                inp_n = std::make_shared<NeuronLinear>();
    
                last_neuron_idx = this->add_neuron(inp_n, BIAS_TYPE::NO_BIAS);
                previous_layer.emplace_back(last_neuron_idx);
                inputs.emplace_back(last_neuron_idx);
    
            }
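            /* One linear input neuron has now been appended for every symmetry
             * function of atom i's element; the network inputs are therefore
             * ordered atom by atom, following elements_list. */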
    
            /* Add an additional input neuron for charge, if provided */
            if(with_charge) {
                inp_n = std::make_shared<NeuronLinear>();

                last_neuron_idx = this->add_neuron(inp_n, BIAS_TYPE::NO_BIAS);
                previous_layer.emplace_back(last_neuron_idx);
                inputs.emplace_back(last_neuron_idx);
            }

            /* Create subnet for the current element */
            bool new_subnet = false;

            /* First occurrence of this element: remember where its biases and weights start */
            if(subnet_constructed.find(elements_list.at(i)) == subnet_constructed.end()) {
                new_subnet = true;

                subnet_constructed[elements_list.at(i)] = true;
                subnet_neuron_shifts[elements_list.at(i)] = last_neuron_bias_idx;
                subnet_connection_shifts[elements_list.at(i)] = last_connection_weight_idx;
            }

            // std::cout << "Particle " << i << ", input neuron indices: " << first_input_neuron_index << " - " << last_neuron_idx << std::endl;
    
            /* Create hidden layers in sub-net */
            std::vector<unsigned int> n_neurons = n_hidden_neurons[elements_list.at(i)];
            std::vector<NEURON_TYPE> types = type_hidden_neurons[elements_list.at(i)];

            size_t local_neuron_idx = subnet_neuron_shifts[elements_list.at(i)];
            size_t local_connection_idx = subnet_connection_shifts[elements_list.at(i)];
    
            for(size_t j = 0; j < n_neurons.size(); j++) { /* Iterate over hidden layers */
                /* Create hidden neurons */
                for(size_t k = 0; k < n_neurons.at(j); k++) {
                    std::shared_ptr<Neuron> hid_n;
                    switch(types.at(j)) {
                        case NEURON_TYPE::LOGISTIC: {
                            hid_n = std::make_shared<NeuronLogistic>();
                            break;
                        }
                        case NEURON_TYPE::BINARY: {
                            hid_n = std::make_shared<NeuronBinary>();
                            break;
                        }
                        case NEURON_TYPE::CONSTANT: {
                            hid_n = std::make_shared<NeuronConstant>();
                            break;
                        }
                        case NEURON_TYPE::LINEAR: {
                            hid_n = std::make_shared<NeuronLinear>();
                            break;
                        }
                    }
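                    /* Note: the switch has no default branch, so hid_n is assumed to be
                     * assigned for every NEURON_TYPE value that can appear here. */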
    
    
                    if(new_subnet) {
                        /* First sub-net of this element: allocate a new bias */
                        last_neuron_idx = this->add_neuron(hid_n,
                                                           BIAS_TYPE::NEXT_BIAS);
                        // std::cout << "  new subnet, neuron index: " << last_neuron_idx << ", neuron bias: " << last_neuron_bias_idx << std::endl;
                        last_neuron_bias_idx++;
                    } else {
                        /* Sub-net already exists for this element: reuse its bias */
                        last_neuron_idx = this->add_neuron(hid_n,
                                                           BIAS_TYPE::EXISTING_BIAS,
                                                           local_neuron_idx);
                        // std::cout << "  old subnet, neuron index: " << last_neuron_idx << ", neuron bias: " << local_neuron_idx << std::endl;
                    }
                    local_neuron_idx++;

                    new_layer.emplace_back(last_neuron_idx);
    
    
                    /* Connect hidden neuron to the previous layer */
                    for(auto prev_n : previous_layer) {
                        if(new_subnet) {
                            this->add_connection_simple(prev_n,
                                                        last_neuron_idx,
                                                        SIMPLE_CONNECTION_TYPE::NEXT_WEIGHT);
                            // std::cout << "    new subnet, connection weight bias: " << last_connection_weight_idx << std::endl;
                            last_connection_weight_idx++;
                        } else {
                            this->add_connection_simple(prev_n,
                                                        last_neuron_idx,
                                                        SIMPLE_CONNECTION_TYPE::EXISTING_WEIGHT,
                                                        local_connection_idx);
                            // std::cout << "    old subnet, connection weight bias: " << local_connection_idx << std::endl;
                        }
                        local_connection_idx++;
                    }
                }

                previous_layer = new_layer;
                new_layer.clear();
            }
    
            /* Connect the sub-net's last layer to the common output neuron */
            for(auto prev_n : previous_layer) {
                this->add_connection_constant(prev_n, outputs[0], 1.0);
            }
        }

        /* Specify network inputs and outputs */
        this->specify_input_neurons(inputs);
        this->specify_output_neurons(outputs);
    }
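
    /*
     * Minimal usage sketch (illustrative only): how the constructor above might be
     * called. How the Element objects and ELEMENT_SYMBOL values are obtained is an
     * assumption and not part of this file; only the constructor signature is.
     *
     *   std::unordered_map<ELEMENT_SYMBOL, Element*> elements;   // one symmetry-function set per species (assumed setup)
     *   std::vector<ELEMENT_SYMBOL> elements_list;               // one entry per atom in the system
     *   std::unordered_map<ELEMENT_SYMBOL, std::vector<unsigned int>> n_hidden_neurons;    // hidden layer sizes per element
     *   std::unordered_map<ELEMENT_SYMBOL, std::vector<NEURON_TYPE>>  type_hidden_neurons; // e.g. NEURON_TYPE::LOGISTIC per layer
     *
     *   lib4neuro::ACSFNeuralNetwork net(elements, elements_list, false,
     *                                    n_hidden_neurons, type_hidden_neurons);
     */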