std::vector<double> *NeuralNetwork::get_parameter_ptr_biases() {
    return this->neuron_biases;
}
std::vector<double> *NeuralNetwork::get_parameter_ptr_weights() {
    return this->connection_weights;
}
size_t NeuralNetwork::add_new_connection_to_list(ConnectionFunctionGeneral *con) {
    this->connection_list->push_back(con);
    return this->connection_list->size() - 1;
}

void NeuralNetwork::add_inward_connection(size_t s, size_t t, size_t con_idx) {
    if (!this->inward_adjacency->at(s)) {
        this->inward_adjacency->at(s) = new ::std::vector<std::pair<size_t, size_t>>(0);
    }
    this->inward_adjacency->at(s)->push_back(std::pair<size_t, size_t>(t, con_idx));
}
void NeuralNetwork::add_outward_connection(size_t s, size_t t, size_t con_idx) {
    if (!this->outward_adjacency->at(s)) {
        this->outward_adjacency->at(s) = new ::std::vector<std::pair<size_t, size_t>>(0);
    }
    this->outward_adjacency->at(s)->push_back(std::pair<size_t, size_t>(t, con_idx));
}
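/*
 * Illustrative sketch (assumes the three helpers above are accessible from the
 * call site): each adjacency entry is a (neighbor index, connection index) pair,
 * where the second element indexes into this->connection_list as returned by
 * add_new_connection_to_list(). A single edge s -> t realized by the connection
 * function 'con' would therefore be registered in both directions:
 *
 *     size_t k = net.add_new_connection_to_list(con);
 *     net.add_outward_connection(s, t, k);  // outward_adjacency[s] gains (t, k)
 *     net.add_inward_connection(t, s, k);   // inward_adjacency[t] gains (s, k)
 */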
void NeuralNetwork::analyze_layer_structure() {
    if (this->layers_analyzed) {
        //nothing to do
        return;
    }

    /* buffer preparation */
    this->neuron_potentials->resize(this->get_n_neurons());

    /* space allocation */
    if (this->neuron_layers_feedforward) {
        for (auto e: *this->neuron_layers_feedforward) {
            delete e;
            e = nullptr;
        }
        delete this->neuron_layers_feedforward;
        this->neuron_layers_feedforward = nullptr;
    }

    // if(this->neuron_layers_feedbackward){
    //     for(auto e: *this->neuron_layers_feedbackward){
    //         delete e;
    //         e = nullptr;
    //     }
    //     delete this->neuron_layers_feedbackward;
    //     this->neuron_layers_feedbackward = nullptr;
    // }

    this->neuron_layers_feedforward = new ::std::vector<std::vector<size_t> *>(0);
    // this->neuron_layers_feedbackward = new ::std::vector<std::vector<size_t>*>(0);

    auto n = this->neurons->size();

    /* helpful counters */
    ::std::vector<size_t> inward_saturation(n);
    ::std::vector<size_t> outward_saturation(n);
    ::std::fill(inward_saturation.begin(), inward_saturation.end(), 0);
    ::std::fill(outward_saturation.begin(), outward_saturation.end(), 0);

    for (unsigned int i = 0; i < n; ++i) {
        if (this->inward_adjacency->at(i)) {
            inward_saturation[i] = this->inward_adjacency->at(i)->size();
        }

        if (this->outward_adjacency->at(i)) {
            outward_saturation[i] = this->outward_adjacency->at(i)->size();
        }
    }

    ::std::vector<size_t> active_eval_set(2 * n);
    size_t active_set_size[2];

    /* feedforward analysis */
    active_set_size[0] = 0;
    active_set_size[1] = 0;

    size_t idx1 = 0, idx2 = 1;

    active_set_size[0] = this->get_n_inputs();
    size_t i = 0;
    for (i = 0; i < this->get_n_inputs(); ++i) {
        active_eval_set[i] = this->input_neuron_indices->at(i);
    }

    size_t active_ni;
    while (active_set_size[idx1] > 0) {

        /* we add the current active set as the new outward layer */
        ::std::vector<size_t> *new_feedforward_layer = new ::std::vector<size_t>(active_set_size[idx1]);
        this->neuron_layers_feedforward->push_back(new_feedforward_layer);

        /* we iterate through the active neurons and propagate the signal */
        for (i = 0; i < active_set_size[idx1]; ++i) {
            active_ni = active_eval_set[i + n * idx1];
            new_feedforward_layer->at(i) = active_ni;

            if (!this->outward_adjacency->at(active_ni)) {
                continue;
            }

            for (auto ni: *(this->outward_adjacency->at(active_ni))) {
                inward_saturation[ni.first]--;

                if (inward_saturation[ni.first] == 0) {
                    active_eval_set[active_set_size[idx2] + n * idx2] = ni.first;
                    active_set_size[idx2]++;
                }
            }
        }

        idx1 = idx2;
        idx2 = (idx1 + 1) % 2;

        active_set_size[idx2] = 0;
    }

    // /* feed backward analysis */
    // active_set_size[0] = 0;
    // active_set_size[1] = 0;
    //
    // idx1 = 0;
    // idx2 = 1;
    //
    // active_set_size[0] = this->get_n_outputs();
    // for(i = 0; i < this->get_n_outputs(); ++i){
    //     active_eval_set[i] = this->output_neuron_indices->at(i);
    // }
    //
    // while(active_set_size[idx1] > 0){
    //
    //     /* we add the current active set as the new outward layer */
    //     ::std::vector<size_t> *new_feedbackward_layer = new ::std::vector<size_t>(active_set_size[idx1]);

    //     this->neuron_layers_feedbackward->push_back( new_feedbackward_layer );
    //
    //     //we iterate through the active neurons and propagate the signal backward
    //     for(i = 0; i < active_set_size[idx1]; ++i){
    //         active_ni = active_eval_set[i + n * idx1];
    //         new_feedbackward_layer->at( i ) = active_ni;
    //
    //         if(!this->inward_adjacency->at(active_ni)){
    //             continue;
    //         }
    //
    //         for(auto ni: *(this->inward_adjacency->at(active_ni))){
    //             outward_saturation[ni.first]--;
    //
    //             if(outward_saturation[ni.first] == 0){
    //                 active_eval_set[active_set_size[idx2] + n * idx2] = ni.first;
    //                 active_set_size[idx2]++;
    //             }
    //         }
    //     }
    //
    //     idx1 = idx2;
    //     idx2 = (idx1 + 1) % 2;
    //
    //     active_set_size[idx2] = 0;
    // }

    this->layers_analyzed = true;
}
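
/*
 * Illustrative sketch (not part of lib4neuro's API): the feedforward analysis
 * above is essentially Kahn's topological sort adapted to emit layers, with the
 * input neurons as the initial frontier and inward_saturation playing the role
 * of the remaining-indegree counters. A minimal standalone version over plain
 * adjacency lists, assuming <vector> is already included in this file:
 */
static std::vector<std::vector<size_t>> topological_layers(
        const std::vector<std::vector<size_t>> &out_edges) {
    /* count incoming edges of every node */
    std::vector<size_t> indegree(out_edges.size(), 0);
    for (const auto &succs : out_edges) {
        for (size_t t : succs) {
            indegree[t]++;
        }
    }

    /* the first layer consists of all nodes without predecessors */
    std::vector<std::vector<size_t>> layers;
    std::vector<size_t> frontier;
    for (size_t v = 0; v < out_edges.size(); ++v) {
        if (indegree[v] == 0) {
            frontier.push_back(v);
        }
    }

    /* peel off one layer at a time; a node becomes active once all its
     * predecessors have been placed into earlier layers */
    while (!frontier.empty()) {
        std::vector<size_t> next;
        for (size_t v : frontier) {
            for (size_t t : out_edges[v]) {
                if (--indegree[t] == 0) {
                    next.push_back(t);
                }
            }
        }
        layers.push_back(frontier);
        frontier = std::move(next);
    }
    return layers;
}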
void NeuralNetwork::save_text(std::string filepath) {
    std::ofstream ofs(filepath);
    {
        boost::archive::text_oarchive oa(ofs);
        oa << *this;
    }
    /* close only after the archive's destructor has flushed its output */
    ofs.close();
}
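/*
 * Usage sketch (illustrative; the file name is a made-up example): save_text()
 * serializes the whole network through Boost.Serialization's text_oarchive, so
 * it can be called on any fully constructed network. A matching text_iarchive
 * based loader is assumed to exist elsewhere in the library; it is not shown
 * in this file.
 */
static void example_save(NeuralNetwork &net) {
    net.save_text("my_network.4nt");
}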
NormalizationStrategy* NeuralNetwork::get_normalization_strategy_instance() {
    return this->normalization_strategy;
}

void NeuralNetwork::set_normalization_strategy_instance(NormalizationStrategy *ns) {
    if(!ns) {
        THROW_RUNTIME_ERROR("Argument 'ns' is not initialized!");
    }
    this->normalization_strategy = ns;
}
FullyConnectedFFN::FullyConnectedFFN(std::vector<unsigned int>* neuron_numbers,
                                     NEURON_TYPE hidden_layer_neuron_type,
                                     std::ofstream* ofs) : NeuralNetwork() {
    std::vector<NEURON_TYPE> tmp;
    for(size_t i = 0; i < neuron_numbers->size(); i++) {
        tmp.emplace_back(hidden_layer_neuron_type);
    }

    this->init(neuron_numbers, &tmp, ofs);
}
FullyConnectedFFN::FullyConnectedFFN(std::vector<unsigned int>* neuron_numbers,
                                     std::vector<lib4neuro::NEURON_TYPE>* hidden_layer_neuron_types,
                                     std::ofstream* ofs) : NeuralNetwork() {
    this->init(neuron_numbers, hidden_layer_neuron_types, ofs);
}
void FullyConnectedFFN::init(std::vector<unsigned int>* neuron_numbers,
                             std::vector<NEURON_TYPE>* hidden_layer_neuron_types,
                             std::ofstream* ofs) {

    if(neuron_numbers->size() < 2) {
        THROW_INVALID_ARGUMENT_ERROR("Parameter 'neuron_numbers' specifying numbers of neurons in network's layers "
                                     "doesn't specify input and output layers, which are compulsory!");
    }

    this->neurons = new ::std::vector<Neuron *>(0);
    this->neuron_biases = new ::std::vector<double>(0);
    this->neuron_potentials = new ::std::vector<double>(0);
    this->neuron_bias_indices = new ::std::vector<int>(0);
    this->connection_weights = new ::std::vector<double>(0);
    this->connection_list = new ::std::vector<ConnectionFunctionGeneral *>(0);
    this->inward_adjacency = new ::std::vector<std::vector<std::pair<size_t, size_t>> *>(0);
    this->outward_adjacency = new ::std::vector<std::vector<std::pair<size_t, size_t>> *>(0);
    this->neuron_layers_feedforward = new ::std::vector<std::vector<size_t> *>(0);
    this->neuron_layers_feedbackward = new ::std::vector<std::vector<size_t> *>(0);
    this->input_neuron_indices = new ::std::vector<size_t>(0);
    this->output_neuron_indices = new ::std::vector<size_t>(0);

    this->delete_weights = true;
    this->delete_biases = true;
    this->layers_analyzed = false;

    unsigned int inp_dim = neuron_numbers->at(0);   //!< Network input dimension
    unsigned int out_dim = neuron_numbers->back();  //!< Network output dimension

    COUT_DEBUG("Fully connected feed-forward network being constructed:" << std::endl);
    COUT_DEBUG("# of inputs: " << inp_dim << std::endl);
    COUT_DEBUG("# of outputs: " << out_dim << std::endl);
WRITE_TO_OFS_DEBUG(ofs, "Fully connected feed-forward network being constructed:" << std::endl
<< "# of inputs: " << inp_dim << std::endl
<< "# of outputs: " << out_dim << std::endl);
    std::vector<size_t> input_layer_neuron_indices;
    std::vector<size_t> previous_layer_neuron_indices;
    std::vector<size_t> current_layer_neuron_indices;

    /* Creation of INPUT layer neurons */
    current_layer_neuron_indices.reserve(inp_dim);
    input_layer_neuron_indices.reserve(inp_dim);
    for(unsigned int i = 0; i < inp_dim; i++) {
        size_t neuron_id = this->add_neuron(new NeuronLinear, BIAS_TYPE::NO_BIAS);
        current_layer_neuron_indices.emplace_back(neuron_id);
    }
    input_layer_neuron_indices = current_layer_neuron_indices;

    /* Creation of HIDDEN layers */
    for(unsigned int i = 1; i <= neuron_numbers->size()-2; i++) {
        COUT_DEBUG("Hidden layer #" << i << ": " << neuron_numbers->at(i) << " neurons" << std::endl);
WRITE_TO_OFS_DEBUG(ofs, "Hidden layer #" << i << ": " << neuron_numbers->at(i) << " neurons" << std::endl);
previous_layer_neuron_indices.reserve(neuron_numbers->at(i-1));
previous_layer_neuron_indices = current_layer_neuron_indices;
current_layer_neuron_indices.clear();
current_layer_neuron_indices.reserve(neuron_numbers->at(i));
/* Creation of one single hidden layer */
for(unsigned int j = 0; j < neuron_numbers->at(i); j++) {
size_t neuron_id;
/* Create new hidden neuron */
Martin Beseda
committed
            switch (hidden_layer_neuron_types->at(i-1)) {
                case NEURON_TYPE::BINARY: {
                    neuron_id = this->add_neuron(new NeuronBinary, BIAS_TYPE::NEXT_BIAS);
                    COUT_DEBUG("Added BINARY neuron." << std::endl);
                    WRITE_TO_OFS_DEBUG(ofs, "Added BINARY neuron." << std::endl);
                    break;
                }

                case NEURON_TYPE::CONSTANT: {
                    THROW_INVALID_ARGUMENT_ERROR("Constant neurons can't be used in fully connected feed-forward networks!");
                }

                case NEURON_TYPE::LINEAR: {
                    neuron_id = this->add_neuron(new NeuronLinear, BIAS_TYPE::NEXT_BIAS);
                    COUT_DEBUG("Added LINEAR neuron." << std::endl);
                    WRITE_TO_OFS_DEBUG(ofs, "Added LINEAR neuron." << std::endl);
                    break;
                }

                case NEURON_TYPE::LOGISTIC: {
                    neuron_id = this->add_neuron(new NeuronLogistic, BIAS_TYPE::NEXT_BIAS);
                    COUT_DEBUG("Added LOGISTIC neuron." << std::endl);
                    WRITE_TO_OFS_DEBUG(ofs, "Added LOGISTIC neuron." << std::endl);
                    break;
                }
            }
            current_layer_neuron_indices.emplace_back(neuron_id);

            /* Connect new neuron with all neurons from the previous layer */
            for(auto ind : previous_layer_neuron_indices) {
                this->add_connection_simple(ind, neuron_id, l4n::SIMPLE_CONNECTION_TYPE::NEXT_WEIGHT);
            }
        }
    }

    previous_layer_neuron_indices.reserve(neuron_numbers->back()-1);
    previous_layer_neuron_indices = current_layer_neuron_indices;
    current_layer_neuron_indices.clear();
    current_layer_neuron_indices.reserve(out_dim);

    /* Creation of OUTPUT layer neurons */
    for(unsigned int i = 0; i < out_dim; i++) {
        size_t neuron_id = this->add_neuron(new NeuronLinear, BIAS_TYPE::NO_BIAS);
        current_layer_neuron_indices.emplace_back(neuron_id);

        /* Connect new neuron with all neurons from the previous layer */
        for(auto ind : previous_layer_neuron_indices) {
            this->add_connection_simple(ind, neuron_id, l4n::SIMPLE_CONNECTION_TYPE::NEXT_WEIGHT);
        }
    }

    /* Init variables containing indices of INPUT and OUTPUT neurons */
    this->input_neuron_indices = new ::std::vector<size_t>(inp_dim);
    this->output_neuron_indices = new ::std::vector<size_t>(out_dim);

    *this->input_neuron_indices = input_layer_neuron_indices;
    *this->output_neuron_indices = current_layer_neuron_indices;

    this->analyze_layer_structure();
}
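
/*
 * Usage sketch (illustrative): building a fully connected 2-4-1 network with
 * logistic hidden neurons. The layer sizes are made-up example values, and
 * passing nullptr for the optional debug stream is an assumption about how
 * WRITE_TO_OFS_DEBUG treats a null stream.
 */
static void example_build_fully_connected() {
    std::vector<unsigned int> layer_sizes = {2, 4, 1};
    FullyConnectedFFN net(&layer_sizes, NEURON_TYPE::LOGISTIC, nullptr);
    /* init() has wired all layers and called analyze_layer_structure(),
     * so the network is ready to be evaluated or trained */
}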
void NeuralNetwork::get_jacobian(std::vector<std::vector<double>> &jacobian,
                                 std::pair<std::vector<double>, std::vector<double>> &data,
                                 std::vector<double> &error) {

    std::vector<double> fv(this->get_n_outputs());

    jacobian.resize(this->get_n_outputs());
    error.resize(this->get_n_outputs());
    for(size_t i = 0; i < this->get_n_outputs(); ++i) {
        jacobian[i].resize(this->get_n_weights() + this->get_n_biases());
        std::fill(jacobian[i].begin(), jacobian[i].end(), 0);
    }

    this->eval_single(data.first, fv);

    std::vector<double> error_partial(this->get_n_outputs());
    std::fill(error_partial.begin(), error_partial.end(), 0.0);

    for(size_t i = 0; i < this->get_n_outputs(); ++i) {
        error_partial[i] = 1;
        this->add_to_gradient_single(data.first, error_partial, 1.0, jacobian[i]);
        error[i] = data.second[i] - fv[i];
        error_partial[i] = 0;
    }
}
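
/*
 * Usage sketch (illustrative): for a single sample (x, y), get_jacobian() fills
 * one row per network output, with jacobian[i][j] holding the derivative of the
 * i-th output w.r.t. the j-th parameter (weights first, then biases) as
 * accumulated by add_to_gradient_single(), and error[i] = y_i - f_i(x). These
 * are the J and e of a Gauss-Newton style step J^T J * dp = J^T e; the solver
 * itself is not part of this file.
 */
static void example_jacobian(NeuralNetwork &net,
                             std::vector<double> &x,
                             std::vector<double> &y) {
    std::vector<std::vector<double>> jacobian;
    std::vector<double> error;
    std::pair<std::vector<double>, std::vector<double>> sample(x, y);

    net.get_jacobian(jacobian, sample, error);

    /* jacobian.size()    == net.get_n_outputs()
     * jacobian[0].size() == net.get_n_weights() + net.get_n_biases() */
}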