//
// Created by martin on 19.08.19.
//
#include "../settings.h"
#include "ACSFNeuralNetwork.h"
lib4neuro::ACSFNeuralNetwork::ACSFNeuralNetwork(std::unordered_map<ELEMENT_SYMBOL, Element*>& elements,
                                                std::vector<ELEMENT_SYMBOL>& elements_list,
                                                bool with_charge,
                                                std::unordered_map<ELEMENT_SYMBOL, std::vector<unsigned int>> n_hidden_neurons,
                                                std::unordered_map<ELEMENT_SYMBOL, std::vector<NEURON_TYPE>> type_hidden_neurons) {
    /* Construct the neural network */
    std::vector<size_t> inputs;
    std::vector<size_t> subnet_outputs;
    size_t neuron_idx;

    std::unordered_map<ELEMENT_SYMBOL, std::vector<size_t>> subnet_idxs;
    std::unordered_map<ELEMENT_SYMBOL, std::vector<size_t>> subnet_neuron_idxs;
    std::unordered_map<ELEMENT_SYMBOL, std::vector<size_t>> subnet_connection_idxs;
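    /* Build one sub-network per atom; the first atom of each element defines that
     * element's hidden-layer parameters, which later atoms of the same element re-use. */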
    for(size_t i = 0; i < elements_list.size(); i++) {
        std::vector<size_t> previous_layer;
        std::vector<size_t> new_layer;

        /* Create input neurons for sub-net */
        std::shared_ptr<NeuronLinear> inp_n;
        for(size_t j = 0; j < elements[elements_list.at(i)]->getSymmetryFunctions().size(); j++) {
            inp_n = std::make_shared<NeuronLinear>();
            neuron_idx = this->add_neuron(inp_n, BIAS_TYPE::NO_BIAS);
            previous_layer.emplace_back(neuron_idx);
            inputs.emplace_back(neuron_idx);
        }

        /* Add an additional input neuron for charge, if provided */
        if(with_charge) {
            inp_n = std::make_shared<NeuronLinear>();
            neuron_idx = this->add_neuron(inp_n, BIAS_TYPE::NO_BIAS);
            previous_layer.emplace_back(neuron_idx);
            inputs.emplace_back(neuron_idx);
        }
        /* Create subnet for the current element */
        bool new_subnet = false;
        if(subnet_neuron_idxs.find(elements_list.at(i)) == subnet_neuron_idxs.end()) {
            new_subnet = true;
            subnet_neuron_idxs[elements_list.at(i)] = std::vector<size_t>();
            subnet_connection_idxs[elements_list.at(i)] = std::vector<size_t>();
        }
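        /* new_subnet is true only for the first atom of an element: that atom allocates
         * fresh biases and weights (NEXT_BIAS / NEXT_WEIGHT) and records their indices;
         * later atoms of the same element re-use them via EXISTING_BIAS / EXISTING_WEIGHT,
         * so atoms of one element share their hidden-layer parameters. */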
        /* Create hidden layers in sub-net */
        std::vector<unsigned int> n_neurons = n_hidden_neurons[elements_list.at(i)];
        std::vector<NEURON_TYPE> types = type_hidden_neurons[elements_list.at(i)];

        /* Running counters of hidden neurons/connections within this sub-net,
         * used to look up the shared parameters of the element's first sub-net */
        size_t local_neuron_idx = 0;
        size_t local_connection_idx = 0;
        for(size_t j = 0; j < n_neurons.size(); j++) { /* Iterate over hidden layers */
            /* Create hidden neurons */
            for(size_t k = 0; k < n_neurons.at(j); k++) {
                std::shared_ptr<Neuron> hid_n;
                switch(types.at(j)) {
                    case NEURON_TYPE::LOGISTIC: {
                        hid_n = std::make_shared<NeuronLogistic>();
                        break;
                    }
                    case NEURON_TYPE::BINARY: {
                        hid_n = std::make_shared<NeuronBinary>();
                        break;
                    }
                    case NEURON_TYPE::CONSTANT: {
                        hid_n = std::make_shared<NeuronConstant>();
                        break;
                    }
                    case NEURON_TYPE::LINEAR: {
                        hid_n = std::make_shared<NeuronLinear>();
                        break;
                    }
                }

                if(new_subnet) {
                    neuron_idx = this->add_neuron(hid_n,
                                                  BIAS_TYPE::NEXT_BIAS);
                    subnet_neuron_idxs[elements_list.at(i)].emplace_back(neuron_idx);
                } else {
                    /* Re-use an already allocated bias instead of creating a new one */
                    neuron_idx = this->add_neuron(hid_n,
                                                  BIAS_TYPE::EXISTING_BIAS,
                                                  local_neuron_idx);
                }
                local_neuron_idx++;

                new_layer.emplace_back(neuron_idx);
                /* Connect hidden neuron to the previous layer */
                for(auto prev_n : previous_layer) {
                    if(new_subnet) {
                        subnet_connection_idxs[elements_list.at(i)].emplace_back(this->add_connection_simple(prev_n,
                                                                                                             neuron_idx,
                                                                                                             SIMPLE_CONNECTION_TYPE::NEXT_WEIGHT));
                    } else {
                        this->add_connection_simple(prev_n,
                                                    neuron_idx,
                                                    SIMPLE_CONNECTION_TYPE::EXISTING_WEIGHT,
                                                    subnet_connection_idxs[elements_list.at(i)].at(0) + local_connection_idx);
                    }
                    local_connection_idx++;
                }
            }

            previous_layer = new_layer;
            new_layer.clear();
        }
        /* Create output neurons for sub-net */
        std::shared_ptr<NeuronLinear> sub_out_n = std::make_shared<NeuronLinear>();
        neuron_idx = this->add_neuron(sub_out_n, BIAS_TYPE::NO_BIAS);
        subnet_outputs.emplace_back(neuron_idx);

        for(auto prev_n : previous_layer) {
            this->add_connection_simple(prev_n, neuron_idx);
        }
    }
    /* Specify network inputs */
    this->specify_input_neurons(inputs);
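    /* The sub-net outputs are combined below with fixed connections of weight 1, so the
     * network output is the plain sum of the per-atom contributions (in a Behler-Parrinello
     * style ACSF model this would typically be the total energy). */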
    /* Create final output layer */
    std::shared_ptr<NeuronLinear> final_out_n = std::make_shared<NeuronLinear>();
    neuron_idx = this->add_neuron(final_out_n, BIAS_TYPE::NO_BIAS);

    for(auto subnet_output : subnet_outputs) {
        this->add_connection_constant(subnet_output, neuron_idx, 1);
    }

    std::vector<size_t> outputs = {neuron_idx};
    this->specify_output_neurons(outputs);
}
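/*
 * Minimal usage sketch, assuming the Element objects and their symmetry functions are
 * set up elsewhere (only the constructor signature above is taken from this file; the
 * variable names here are illustrative):
 *
 *     std::unordered_map<ELEMENT_SYMBOL, Element*> elements;                        // one Element per species
 *     std::vector<ELEMENT_SYMBOL> atoms = {...};                                    // one entry per atom in the system
 *     std::unordered_map<ELEMENT_SYMBOL, std::vector<unsigned int>> hidden_sizes;   // hidden-layer sizes per element
 *     std::unordered_map<ELEMENT_SYMBOL, std::vector<NEURON_TYPE>> hidden_types;    // activation type per hidden layer
 *
 *     lib4neuro::ACSFNeuralNetwork net(elements, atoms, false, hidden_sizes, hidden_types);
 */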