Newer
Older
Martin Beseda
committed
void NeuralNetwork::save_text(std::string filepath) {
Martin Beseda
committed
{
boost::archive::text_oarchive oa(ofs);
oa << *this;
ofs.close();
}
Martin Beseda
committed
/**
 * Accessor for the normalization strategy currently attached to the network.
 *
 * @return Non-owning pointer to the strategy instance (may be null if none
 *         has been set).
 */
NormalizationStrategy* NeuralNetwork::get_normalization_strategy_instance() {
    return normalization_strategy;
}
/**
 * Attaches a normalization strategy to the network.
 *
 * @param ns Strategy instance to use; must not be null.
 * @throws std::runtime_error (via THROW_RUNTIME_ERROR) when ns is null.
 *
 * NOTE(review): ownership semantics are not visible here — the pointer is
 * stored as-is; confirm who deletes it.
 */
void NeuralNetwork::set_normalization_strategy_instance(NormalizationStrategy *ns) {
    if(ns == nullptr) {
        THROW_RUNTIME_ERROR("Argument 'ns' is not initialized!");
    }

    this->normalization_strategy = ns;
}
Martin Beseda
committed
/**
 * Constructs a fully connected feed-forward network in which every hidden
 * layer uses the same neuron type.
 *
 * @param neuron_numbers Numbers of neurons per layer (input layer first,
 *                       output layer last).
 * @param hidden_layer_neuron_type Neuron type used for all hidden layers.
 * @param ofs Optional debug output stream (may be null).
 */
FullyConnectedFFN::FullyConnectedFFN(std::vector<unsigned int>* neuron_numbers,
                                     NEURON_TYPE hidden_layer_neuron_type,
                                     std::ofstream* ofs) : NeuralNetwork() {

    /* Fill constructor replaces the original hand-rolled loop, which also
     * compared a signed `auto i = 0` against the unsigned size(). */
    std::vector<NEURON_TYPE> tmp(neuron_numbers->size(), hidden_layer_neuron_type);

    this->init(neuron_numbers, &tmp, ofs);
}
/**
 * Constructs a fully connected feed-forward network with an explicitly
 * specified neuron type for each hidden layer.
 *
 * @param neuron_numbers Numbers of neurons per layer (input first, output
 *                       last).
 * @param hidden_layer_neuron_types One neuron type per hidden layer.
 * @param ofs Optional debug output stream (may be null).
 */
FullyConnectedFFN::FullyConnectedFFN(std::vector<unsigned int>* neuron_numbers,
                                     std::vector<lib4neuro::NEURON_TYPE>* hidden_layer_neuron_types,
                                     std::ofstream* ofs) : NeuralNetwork() {
    /* All construction work is delegated to the shared initializer. */
    init(neuron_numbers, hidden_layer_neuron_types, ofs);
}
Martin Beseda
committed
void FullyConnectedFFN::init(std::vector<unsigned int>* neuron_numbers,
std::vector<NEURON_TYPE>* hidden_layer_neuron_types,
std::ofstream* ofs) {
THROW_INVALID_ARGUMENT_ERROR("Parameter 'neuron_numbers' specifying numbers of neurons in network's layers "
"doesn't specify input and output layers, which are compulsory!");
this->neurons = new ::std::vector<Neuron *>(0);
this->neuron_biases = new ::std::vector<double>(0);
this->neuron_potentials = new ::std::vector<double>(0);
this->neuron_bias_indices = new ::std::vector<int>(0);
this->connection_weights = new ::std::vector<double>(0);
this->connection_list = new ::std::vector<ConnectionFunctionGeneral *>(0);
this->inward_adjacency = new ::std::vector<std::vector<std::pair<size_t, size_t>> *>(0);
this->outward_adjacency = new ::std::vector<std::vector<std::pair<size_t, size_t>> *>(0);
this->neuron_layers_feedforward = new ::std::vector<std::vector<size_t> *>(0);
this->neuron_layers_feedbackward = new ::std::vector<std::vector<size_t> *>(0);
this->input_neuron_indices = new ::std::vector<size_t>(0);
this->output_neuron_indices = new ::std::vector<size_t>(0);
this->delete_weights = true;
this->delete_biases = true;
this->layers_analyzed = false;
unsigned int inp_dim = neuron_numbers->at(0); //!< Network input dimension
unsigned int out_dim = neuron_numbers->back(); //!< Network output dimension
COUT_DEBUG("Fully connected feed-forward network being constructed:" << std::endl);
COUT_DEBUG("# of inputs: " << inp_dim << std::endl);
COUT_DEBUG("# of outputs: " << out_dim << std::endl);
Martin Beseda
committed
WRITE_TO_OFS_DEBUG(ofs, "Fully connected feed-forward network being constructed:" << std::endl
<< "# of inputs: " << inp_dim << std::endl
<< "# of outputs: " << out_dim << std::endl);
std::vector<size_t> input_layer_neuron_indices;
std::vector<size_t> previous_layer_neuron_indices;
std::vector<size_t> current_layer_neuron_indices;
/* Creation of INPUT layer neurons */
current_layer_neuron_indices.reserve(inp_dim);
input_layer_neuron_indices.reserve(inp_dim);
for(unsigned int i = 0; i < inp_dim; i++) {
size_t neuron_id = this->add_neuron(new NeuronLinear, BIAS_TYPE::NO_BIAS);
current_layer_neuron_indices.emplace_back(neuron_id);
}
input_layer_neuron_indices = current_layer_neuron_indices;
/* Creation of HIDDEN layers */
for(unsigned int i = 1; i <= neuron_numbers->size()-2; i++) {
COUT_DEBUG("Hidden layer #" << i << ": " << neuron_numbers->at(i) << " neurons" << std::endl);
Martin Beseda
committed
WRITE_TO_OFS_DEBUG(ofs, "Hidden layer #" << i << ": " << neuron_numbers->at(i) << " neurons" << std::endl);
previous_layer_neuron_indices.reserve(neuron_numbers->at(i-1));
previous_layer_neuron_indices = current_layer_neuron_indices;
current_layer_neuron_indices.clear();
current_layer_neuron_indices.reserve(neuron_numbers->at(i));
/* Creation of one single hidden layer */
for(unsigned int j = 0; j < neuron_numbers->at(i); j++) {
size_t neuron_id;
/* Create new hidden neuron */
Martin Beseda
committed
switch (hidden_layer_neuron_types->at(i-1)) {
case NEURON_TYPE::BINARY: {
neuron_id = this->add_neuron(new NeuronBinary, BIAS_TYPE::NEXT_BIAS);
Martin Beseda
committed
COUT_DEBUG("Added BINARY neuron." << std::endl);
Martin Beseda
committed
WRITE_TO_OFS_DEBUG(ofs, "Added BINARY neuron." << std::endl);
case NEURON_TYPE::CONSTANT: {
THROW_INVALID_ARGUMENT_ERROR("Constant neurons can't be used in fully connected feed-forward networks!");
case NEURON_TYPE::LINEAR: {
neuron_id = this->add_neuron(new NeuronLinear, BIAS_TYPE::NEXT_BIAS);
Martin Beseda
committed
COUT_DEBUG("Added LINEAR neuron." << std::endl);
Martin Beseda
committed
WRITE_TO_OFS_DEBUG(ofs, "Added LINEAR neuron." << std::endl);
case NEURON_TYPE::LOGISTIC: {
neuron_id = this->add_neuron(new NeuronLogistic, BIAS_TYPE::NEXT_BIAS);
Martin Beseda
committed
COUT_DEBUG("Added LOGISTIC neuron." << std::endl);
Martin Beseda
committed
WRITE_TO_OFS_DEBUG(ofs, "Added LINEAR neuron." << std::endl);
break;
}
}
current_layer_neuron_indices.emplace_back(neuron_id);
/* Connect new neuron with all neurons from the previous layer */
1136
1137
1138
1139
1140
1141
1142
1143
1144
1145
1146
1147
1148
1149
1150
1151
1152
1153
1154
1155
1156
1157
1158
1159
1160
1161
1162
1163
for(auto ind : previous_layer_neuron_indices) {
this->add_connection_simple(ind, neuron_id, l4n::SIMPLE_CONNECTION_TYPE::NEXT_WEIGHT);
}
}
}
previous_layer_neuron_indices.reserve(neuron_numbers->back()-1);
previous_layer_neuron_indices = current_layer_neuron_indices;
current_layer_neuron_indices.clear();
current_layer_neuron_indices.reserve(out_dim);
/* Creation of OUTPUT layer neurons */
for(unsigned int i = 0; i < out_dim; i++) {
size_t neuron_id = this->add_neuron(new NeuronLinear, BIAS_TYPE::NO_BIAS);
current_layer_neuron_indices.emplace_back(neuron_id);
/* Connect new neuron with all neuron from the previous layer */
for(auto ind : previous_layer_neuron_indices) {
this->add_connection_simple(ind, neuron_id, l4n::SIMPLE_CONNECTION_TYPE::NEXT_WEIGHT);
}
}
/* Init variables containing indices of INPUT nad OUTPUT neurons */
this->input_neuron_indices = new ::std::vector<size_t>(inp_dim);
this->output_neuron_indices = new ::std::vector<size_t>(out_dim);
*this->input_neuron_indices = input_layer_neuron_indices;
*this->output_neuron_indices = current_layer_neuron_indices;
this->analyze_layer_structure();