//
// Created by martin on 19.08.19.
//
#include <string>
#include <memory>
#include <vector>
#include <unordered_map>

#include "../exceptions.h"
#include "../settings.h"
#include "ACSFNeuralNetwork.h"
lib4neuro::ACSFNeuralNetwork::ACSFNeuralNetwork(std::unordered_map<ELEMENT_SYMBOL, Element*>& elements,
                                                std::vector<ELEMENT_SYMBOL>& elements_list,
                                                bool with_charge,
                                                std::unordered_map<ELEMENT_SYMBOL, std::vector<unsigned int>> n_hidden_neurons,
                                                std::unordered_map<ELEMENT_SYMBOL, std::vector<NEURON_TYPE>> type_hidden_neurons) {

    /* Check parameters */
    for(auto symbol : elements_list) {
        if(n_hidden_neurons[symbol].size() != type_hidden_neurons[symbol].size()) {
            THROW_RUNTIME_ERROR("Number of hidden layers for " + elements[symbol]->getElementSymbol() + " ("
                                + std::to_string(n_hidden_neurons[symbol].size())
                                + ") doesn't correspond with a number of hidden neuron types ("
                                + std::to_string(type_hidden_neurons[symbol].size()) + ")!");
        }
    }

    /* Construct the neural network */
    std::vector<size_t> inputs;
    std::unordered_map<ELEMENT_SYMBOL, size_t> subnet_neuron_shifts;
    std::unordered_map<ELEMENT_SYMBOL, size_t> subnet_connection_shifts;
    std::unordered_map<ELEMENT_SYMBOL, bool> subnet_constructed;

    size_t last_neuron_bias_idx = 0;
    size_t last_connection_weight_idx = 0;

    std::shared_ptr<Neuron> output_neuron = std::make_shared<NeuronLinear>();
    size_t last_neuron_idx = this->add_neuron(output_neuron, BIAS_TYPE::NO_BIAS);
    std::vector<size_t> outputs = {last_neuron_idx};
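
    /* Each atom in elements_list gets its own sub-net. The first time an element is
     * encountered, new biases and weights are allocated; later atoms of the same element
     * re-use them via the subnet_neuron_shifts / subnet_connection_shifts maps, so the
     * parameters are shared per element. */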
    for(size_t i = 0; i < elements_list.size(); i++) {
        std::vector<size_t> previous_layer;
        std::vector<size_t> new_layer;

        size_t first_input_neuron_index = last_neuron_idx + 1;

        /* Create input neurons for sub-net */
        std::shared_ptr<NeuronLinear> inp_n;
        for(size_t j = 0; j < elements[elements_list.at(i)]->getSymmetryFunctions().size(); j++) {
            inp_n = std::make_shared<NeuronLinear>();
            last_neuron_idx = this->add_neuron(inp_n, BIAS_TYPE::NO_BIAS);
            previous_layer.emplace_back(last_neuron_idx);
            inputs.emplace_back(last_neuron_idx);
        }

        /* Add an additional input neuron for charge, if provided */
        if(with_charge) {
            inp_n = std::make_shared<NeuronLinear>();
            last_neuron_idx = this->add_neuron(inp_n, BIAS_TYPE::NO_BIAS);
            previous_layer.emplace_back(last_neuron_idx);
            inputs.emplace_back(last_neuron_idx);
        }

        /* Create sub-net for the current element; remember whether its parameters are
         * allocated now or re-used from an earlier atom of the same element */
        bool new_subnet = false;
        if(subnet_constructed.find(elements_list.at(i)) == subnet_constructed.end()) {
            new_subnet = true;
            subnet_constructed[elements_list.at(i)] = true;
            subnet_neuron_shifts[elements_list.at(i)] = last_neuron_bias_idx;
            subnet_connection_shifts[elements_list.at(i)] = last_connection_weight_idx;
            // std::cout << "Particle " << i << ", input neuron indices: " << first_input_neuron_index << " - " << last_neuron_idx << std::endl;
        }

        /* Create hidden layers in sub-net */
        std::vector<unsigned int> n_neurons = n_hidden_neurons[elements_list.at(i)];
        std::vector<NEURON_TYPE> types = type_hidden_neurons[elements_list.at(i)];
        size_t local_neuron_idx = subnet_neuron_shifts[elements_list.at(i)];
        size_t local_connection_idx = subnet_connection_shifts[elements_list.at(i)];

        for(size_t j = 0; j < n_neurons.size(); j++) { /* Iterate over hidden layers */
            /* Create hidden neurons */
            for(size_t k = 0; k < n_neurons.at(j); k++) {
                std::shared_ptr<Neuron> hid_n;
                switch(types.at(j)) {
                    case NEURON_TYPE::LOGISTIC: {
                        hid_n = std::make_shared<NeuronLogistic>();
                        break;
                    }
                    case NEURON_TYPE::BINARY: {
                        hid_n = std::make_shared<NeuronBinary>();
                        break;
                    }
                    case NEURON_TYPE::CONSTANT: {
                        hid_n = std::make_shared<NeuronConstant>();
                        break;
                    }
                    case NEURON_TYPE::LINEAR: {
                        hid_n = std::make_shared<NeuronLinear>();
                        break;
                    }
                }
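
                /* The first sub-net built for an element allocates new biases and weights
                 * (NEXT_BIAS / NEXT_WEIGHT); sub-nets of later atoms of the same element
                 * re-use those parameters through the local_* indices (EXISTING_BIAS /
                 * EXISTING_WEIGHT), which keeps the per-element parameters shared. */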
                if(new_subnet) {
                    last_neuron_idx = this->add_neuron(hid_n, BIAS_TYPE::NEXT_BIAS);
                    // std::cout << " new subnet, neuron index: " << last_neuron_idx << ", neuron bias: " << last_neuron_bias_idx << std::endl;
                    last_neuron_bias_idx++;
                } else {
                    last_neuron_idx = this->add_neuron(hid_n, BIAS_TYPE::EXISTING_BIAS, local_neuron_idx);
                    // std::cout << " old subnet, neuron index: " << last_neuron_idx << ", neuron bias: " << local_neuron_idx << std::endl;
                    local_neuron_idx++;
                }
                new_layer.emplace_back(last_neuron_idx);

                /* Connect hidden neuron to the previous layer */
                for(auto prev_n : previous_layer) {
                    if(new_subnet) {
                        this->add_connection_simple(prev_n,
                                                    last_neuron_idx,
                                                    SIMPLE_CONNECTION_TYPE::NEXT_WEIGHT);
                        // std::cout << " new subnet, connection weight bias: " << last_connection_weight_idx << std::endl;
                        last_connection_weight_idx++;
                    } else {
                        this->add_connection_simple(prev_n,
                                                    last_neuron_idx,
                                                    SIMPLE_CONNECTION_TYPE::EXISTING_WEIGHT,
                                                    local_connection_idx);
                        // std::cout << " old subnet, connection weight bias: " << local_connection_idx << std::endl;
                        local_connection_idx++;
                    }
                }
            }

            previous_layer = new_layer;
            new_layer.clear();
        }

        /* Connect the last layer of the sub-net to the shared output neuron */
        for(auto prev_n : previous_layer) {
            this->add_connection_constant(prev_n, outputs[0], 1.0);
        }
    }

    /* Specify network inputs and outputs */
    this->specify_input_neurons(inputs);
    this->specify_output_neurons(outputs);
}
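
/* Minimal usage sketch (illustrative only). The ELEMENT_SYMBOL values and the way the
 * Element instances and their symmetry functions are prepared are assumptions of this
 * example, not something defined in this file; only the constructor call itself follows
 * the signature above.
 *
 *   std::unordered_map<lib4neuro::ELEMENT_SYMBOL, lib4neuro::Element*> elements;  // assumed to be filled by the caller
 *   std::vector<lib4neuro::ELEMENT_SYMBOL> atoms = {lib4neuro::ELEMENT_SYMBOL::O,
 *                                                   lib4neuro::ELEMENT_SYMBOL::H,
 *                                                   lib4neuro::ELEMENT_SYMBOL::H};
 *
 *   // Two logistic hidden layers (5 and 3 neurons) for every element present
 *   std::unordered_map<lib4neuro::ELEMENT_SYMBOL, std::vector<unsigned int>> n_hidden;
 *   std::unordered_map<lib4neuro::ELEMENT_SYMBOL, std::vector<lib4neuro::NEURON_TYPE>> hidden_types;
 *   for(auto el : atoms) {
 *       n_hidden[el]     = {5, 3};
 *       hidden_types[el] = {lib4neuro::NEURON_TYPE::LOGISTIC, lib4neuro::NEURON_TYPE::LOGISTIC};
 *   }
 *
 *   lib4neuro::ACSFNeuralNetwork net(elements, atoms, false, n_hidden, hidden_types);
 */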