current_layer_neuron_indices.emplace_back(neuron_id);
}
input_layer_neuron_indices = current_layer_neuron_indices;
/* Creation of HIDDEN layers */
for (unsigned int i = 1; i <= neuron_numbers->size() - 2; i++) {
COUT_DEBUG("Hidden layer #" << i << ": " << neuron_numbers->at(i) << " neurons" << std::endl);
WRITE_TO_OFS_DEBUG(ofs,
"Hidden layer #" << i << ": " << neuron_numbers->at(i) << " neurons" << std::endl);
previous_layer_neuron_indices.reserve(neuron_numbers->at(i - 1));
previous_layer_neuron_indices = current_layer_neuron_indices;
current_layer_neuron_indices.clear();
current_layer_neuron_indices.reserve(neuron_numbers->at(i));
/* Creation of one single hidden layer */
for (unsigned int j = 0; j < neuron_numbers->at(i); j++) {
size_t neuron_id;
/* Create new hidden neuron */
switch (hidden_layer_neuron_types->at(i - 1)) {
case NEURON_TYPE::BINARY: {
std::shared_ptr<Neuron> new_neuron;
new_neuron.reset(new NeuronBinary());
neuron_id = this->add_neuron(new_neuron,
BIAS_TYPE::NEXT_BIAS);
COUT_DEBUG("Added BINARY neuron." << std::endl);
WRITE_TO_OFS_DEBUG(ofs,
"Added BINARY neuron." << std::endl);
case NEURON_TYPE::CONSTANT: {
THROW_INVALID_ARGUMENT_ERROR("Constant neurons can't be used in fully connected feed-forward networks!");
case NEURON_TYPE::LINEAR: {
std::shared_ptr<Neuron> new_neuron;
new_neuron.reset(new NeuronLinear());
neuron_id = this->add_neuron(new_neuron,
BIAS_TYPE::NEXT_BIAS);
COUT_DEBUG("Added LINEAR neuron." << std::endl);
WRITE_TO_OFS_DEBUG(ofs,
"Added LINEAR neuron." << std::endl);
case NEURON_TYPE::LOGISTIC: {
std::shared_ptr<Neuron> new_neuron;
new_neuron.reset(new NeuronLogistic());
neuron_id = this->add_neuron(new_neuron,
BIAS_TYPE::NEXT_BIAS);
COUT_DEBUG("Added LOGISTIC neuron." << std::endl);
WRITE_TO_OFS_DEBUG(ofs,
"Added LINEAR neuron." << std::endl);
break;
}
}
current_layer_neuron_indices.emplace_back(neuron_id);
/* Connect new neuron with all neurons from the previous layer */
for (auto ind : previous_layer_neuron_indices) {
this->add_connection_simple(ind,
neuron_id,
lib4neuro::SIMPLE_CONNECTION_TYPE::NEXT_WEIGHT);
previous_layer_neuron_indices.reserve(neuron_numbers->back() - 1);
previous_layer_neuron_indices = current_layer_neuron_indices;
current_layer_neuron_indices.clear();
current_layer_neuron_indices.reserve(out_dim);
/* Creation of OUTPUT layer neurons */
std::shared_ptr<Neuron> new_neuron;
new_neuron.reset(new NeuronLinear());
size_t neuron_id = this->add_neuron(new_neuron,
BIAS_TYPE::NO_BIAS);
current_layer_neuron_indices.emplace_back(neuron_id);
/* Connect new neuron with all neuron from the previous layer */
for (auto ind : previous_layer_neuron_indices) {
this->add_connection_simple(ind,
neuron_id,
lib4neuro::SIMPLE_CONNECTION_TYPE::NEXT_WEIGHT);
}
}
/* Init variables containing indices of INPUT nad OUTPUT neurons */
this->input_neuron_indices = input_layer_neuron_indices;
this->output_neuron_indices = current_layer_neuron_indices;
this->analyze_layer_structure( );
this->randomize_parameters( );
/**
 * Computes the Jacobian of the network outputs with respect to all trainable
 * parameters (weights followed by biases) for a single data point, together
 * with the per-output error.
 *
 * @param jacobian [out] resized to n_outputs rows; row i holds
 *                 d(output_i)/d(parameter_j) for every parameter j
 * @param data     input/target pair: data.first is the input vector,
 *                 data.second the desired output vector
 * @param error    [out] resized to n_outputs; error[i] = target_i - output_i
 */
void NeuralNetwork::get_jacobian(std::vector<std::vector<double>>& jacobian,
                                 std::pair<std::vector<double>, std::vector<double>>& data,
                                 std::vector<double>& error) {
    /* Forward pass: network output for this input is needed for the error terms.
     * NOTE(review): the garbled original allocated 'fv' but never filled it —
     * confirm 'eval_single' is the intended forward-evaluation entry point. */
    std::vector<double> fv(this->get_n_outputs());
    this->eval_single(data.first, fv);

    jacobian.resize(this->get_n_outputs());
    error.resize(this->get_n_outputs());
    for (size_t i = 0; i < this->get_n_outputs(); ++i) {
        jacobian[i].resize(this->get_n_weights() + this->get_n_biases());
        std::fill(jacobian[i].begin(),
                  jacobian[i].end(),
                  0.0);
    }

    /* Seed exactly one output partial derivative at a time so that each
     * backward accumulation yields one row of the Jacobian. */
    std::vector<double> error_partial(this->get_n_outputs(), 0.0);
    for (size_t i = 0; i < this->get_n_outputs(); ++i) {
        /* BUG FIX: the original left every partial at 0, so add_to_gradient_single
         * accumulated nothing and the Jacobian stayed all-zero. */
        error_partial[i] = 1.0;
        this->add_to_gradient_single(data.first,
                                     error_partial,
                                     1.0,
                                     jacobian[i]);
        /* BUG FIX: 'error' was resized but never assigned in the original. */
        error[i] = data.second[i] - fv[i];
        error_partial[i] = 0.0;
    }
}
/**
 * Returns the smallest and the largest connection weight in the network.
 *
 * @return pair {minimum weight, maximum weight}; behavior is undefined if the
 *         network has no connection weights (as in the original implementation)
 */
std::pair<double, double> NeuralNetwork::get_min_max_weight() {
    /* Single pass over the weights instead of two separate min/max scans. */
    auto extrema = std::minmax_element(this->connection_weights.begin(),
                                       this->connection_weights.end());
    return std::make_pair(*extrema.first,
                          *extrema.second);
}
/**
 * Number of neurons in the input layer.
 *
 * @return count of indices stored for input-layer neurons
 */
size_t NeuralNetwork::get_input_neurons_number() {
    const size_t input_count = this->input_neuron_indices.size();
    return input_count;
}
/**
 * Number of neurons in the output layer.
 *
 * @return count of indices stored for output-layer neurons
 */
size_t NeuralNetwork::get_output_neurons_number() {
    const size_t output_count = this->output_neuron_indices.size();
    return output_count;
}