Commit dd87d505 authored by Michal Kravcenko

FIX: fixed an issue occurring when no batch size is selected

parent 03ac11af
@@ -52,9 +52,18 @@ void lib4neuro::LevenbergMarquardt::LevenbergMarquardtImpl::get_jacobian_and_rhs
     std::vector<std::vector<double>> jacobian;
     std::vector<double> rhs_vec;
-    ef.divide_data_train_test((double)data_subset_size / (double)ef.get_n_data_set());
+    if(data_subset_size <= 0){
+        data_subset_size = ef.get_n_data_set();
+    }
+    if(data_subset_size < ef.get_n_data_set()){
+        ef.divide_data_train_test((double)data_subset_size / (double)ef.get_n_data_set());
+    }
     ef.get_jacobian_and_rhs(jacobian, rhs_vec);
-    ef.return_full_data_set_for_training();
+    if(data_subset_size < ef.get_n_data_set()){
+        ef.return_full_data_set_for_training();
+    }
     size_t dim_out = jacobian.size();
     size_t n_parameters = rhs_vec.size();
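
To make the behaviour of the fix explicit, here is a minimal standalone sketch of the guarded subset logic (illustrative only, not lib4neuro code; FakeErrorFunction and get_jacobian_subset are hypothetical stand-ins modelling just the three calls that appear in the hunk):

#include <cstddef>
#include <iostream>

/* Hypothetical stand-in for the error-function object used in the diff above. */
struct FakeErrorFunction {
    std::size_t n_data = 100;
    std::size_t get_n_data_set() const { return n_data; }
    void divide_data_train_test(double f) { std::cout << "split, fraction = " << f << '\n'; }
    void return_full_data_set_for_training() { std::cout << "restore full data set\n"; }
};

/* Mirrors the fixed logic: a non-positive batch size now means "use all data",
 * and the split/restore pair only runs when a strict subset was requested. */
void get_jacobian_subset(FakeErrorFunction& ef, int data_subset_size) {
    if (data_subset_size <= 0) {
        data_subset_size = (int) ef.get_n_data_set();
    }
    if ((std::size_t) data_subset_size < ef.get_n_data_set()) {
        ef.divide_data_train_test((double) data_subset_size / (double) ef.get_n_data_set());
    }
    /* ... the Jacobian and right-hand side would be assembled here ... */
    if ((std::size_t) data_subset_size < ef.get_n_data_set()) {
        ef.return_full_data_set_for_training();
    }
}

int main() {
    FakeErrorFunction ef;
    get_jacobian_subset(ef, 0);   // no batch size selected: full data, no split/restore
    get_jacobian_subset(ef, 25);  // strict subset: split before, restore after
    get_jacobian_subset(ef, 100); // subset equal to the data set: treated as full data
}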
/**
* DESCRIPTION OF THE FILE
*
* @author Michal Kravčenko
* @date 11.3.19 -
*/
#include <Neuron/NeuronBiased.h>

#include "../message.h"
#include "../exceptions.h"
#include "../settings.h"
#include "../General/ExprtkWrapper.h"
#include "../Network/NeuralNetwork.h"

#include "Simulator.h"
namespace lib4neuro {
Simulator::Simulator(
size_t n_outputs,
std::vector<size_t>& hidden_net_structure,
std::vector<std::vector<double>>& t,
std::vector<std::vector<double>>& xi
) {
this->neurons = new ::std::vector<Neuron*>(0);
this->neuron_biases = new ::std::vector<double>(0);
this->neuron_potentials = new ::std::vector<double>(0);
this->neuron_bias_indices = new ::std::vector<int>(0);
this->connection_weights = new ::std::vector<double>(0);
this->connection_list = new ::std::vector<ConnectionFunctionGeneral*>(0);
this->inward_adjacency = new ::std::vector<std::vector<std::pair<size_t, size_t>>*>(0);
this->outward_adjacency = new ::std::vector<std::vector<std::pair<size_t, size_t>>*>(0);
this->neuron_layers_feedforward = new ::std::vector<std::vector<size_t>*>(0);
this->neuron_layers_feedbackward = new ::std::vector<std::vector<size_t>*>(0);
this->input_neuron_indices = new ::std::vector<size_t>(0);
this->output_neuron_indices = new ::std::vector<size_t>(0);
this->delete_weights = true;
this->delete_biases = true;
this->layers_analyzed = false;
size_t inp_dim = 1; //!< Network input dimension
size_t out_dim = n_outputs; //!< Network output dimension
size_t transfer_dim = xi.size();
this->n_valves = transfer_dim;
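    /* Every entry of xi corresponds to one "valve": for each valve the code below
     * builds, per output, xi[hi].size() weight-sharing copies of a small feed-forward
     * sub-network and records the inclusive range of connection indices it creates,
     * so that eval_model(t, valve_idx, result) can later restrict the forward pass
     * to a single valve. */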
COUT_DEBUG("Simulator is being constructed:" << std::endl);
COUT_DEBUG("# of inputs: " << inp_dim << std::endl);
COUT_DEBUG("# of outputs: " << out_dim << std::endl);
COUT_DEBUG("# transfer functions: " << transfer_dim << std::endl);
std::vector<size_t> input_layer_neuron_indices;
std::vector<size_t> output_layer_neuron_indices;
std::vector<size_t> previous_layer_neuron_indices;
std::vector<size_t> current_layer_neuron_indices;
/* Creation of INPUT layer neurons */
current_layer_neuron_indices.reserve(inp_dim);
input_layer_neuron_indices.reserve(inp_dim);
output_layer_neuron_indices.reserve(out_dim);
size_t neuron_id = this->add_neuron(new NeuronLinear,
BIAS_TYPE::NO_BIAS);
input_layer_neuron_indices.emplace_back(neuron_id);
/* Creation of OUTPUT layer neurons */
for (unsigned int i = 0; i < out_dim; i++) {
size_t neuron_id = this->add_neuron(new NeuronLinear,
BIAS_TYPE::NO_BIAS);
output_layer_neuron_indices.emplace_back(neuron_id);
}
std::vector<size_t> bias_layer_neuron_indices;
for (size_t hi = 0; hi < transfer_dim; ++hi) {
COUT_DEBUG(" CONSTRUCTING TRANSFER FUNCTION BETWEEN VALVE #" << (hi + 1)
<< " AND THE OUTPUTS, # OF COPIES PER OUTPUT: "
<< xi[hi].size() << std::endl);
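        /* remember where this valve's connections start; the matching (inclusive)
         * end index is stored after the loop over the outputs below */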
this->index_start_output_connections_inclusive.push_back(this->connection_list->size());
//connection towards the bias neurons
bias_layer_neuron_indices.resize(0);
for (size_t nn = 0; nn < xi[hi].size(); ++nn) {
size_t new_n_idx = this->add_neuron(new NeuronBiased(xi[hi][nn]),
BIAS_TYPE::NO_BIAS);
bias_layer_neuron_indices.emplace_back(new_n_idx);
this->add_connection_constant(input_layer_neuron_indices[0],
new_n_idx,
1.0);
}
for (size_t oi = 0; oi < out_dim; ++oi) {
COUT_DEBUG(" CONSTRUCTING TRANSFER FUNCTION TOWARDS OUTPUT #" << (oi + 1) << std::endl);
size_t first_connection_idx = this->connection_weights->size();
size_t first_bias_idx = this->neuron_biases->size();
COUT_DEBUG(" CONSTRUCTING TRANSFER FUNCTION #" << (1) << std::endl);
//for each timestep, we add a 'copy' of one neural network
std::vector<size_t> current_layer_neuron_indices_local, input_layer_neuron_indices_local, previous_layer_neuron_indices_local;
std::vector<size_t> bias_indices, connection_weights_indices;
/* Creation of INPUT layer neurons */
current_layer_neuron_indices_local.reserve(inp_dim);
input_layer_neuron_indices_local.reserve(inp_dim);
size_t first_neuron_id = bias_layer_neuron_indices[0];
input_layer_neuron_indices_local.emplace_back(first_neuron_id);
current_layer_neuron_indices_local = input_layer_neuron_indices_local;
/* Creation of HIDDEN layers */
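            /* the interior entries of hidden_net_structure (indices 1 .. size()-2)
             * give the hidden layer sizes; at least three entries are assumed */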
for (unsigned int i = 1; i <= hidden_net_structure.size() - 2; i++) {
previous_layer_neuron_indices_local.reserve(hidden_net_structure.at(i - 1));
previous_layer_neuron_indices_local = current_layer_neuron_indices_local;
current_layer_neuron_indices_local.clear();
current_layer_neuron_indices_local.reserve(hidden_net_structure.at(i));
/* Creation of one single hidden layer */
for (unsigned int j = 0; j < hidden_net_structure.at(i); j++) {
/* Create a new hidden neuron */
size_t neuron_id = this->add_neuron(new NeuronLogistic,
BIAS_TYPE::NEXT_BIAS);
current_layer_neuron_indices_local.emplace_back(neuron_id);
/* Connect the new neuron with all neurons from the previous layer */
for (auto ind : previous_layer_neuron_indices_local) {
this->add_connection_simple(ind,
neuron_id,
l4n::SIMPLE_CONNECTION_TYPE::NEXT_WEIGHT);
}
}
}
previous_layer_neuron_indices_local.reserve(hidden_net_structure.back() - 1);
previous_layer_neuron_indices_local = current_layer_neuron_indices_local;
current_layer_neuron_indices_local.clear();
current_layer_neuron_indices_local.reserve(1);
/* Creation of OUTPUT layer neurons */
size_t last_neuron_id = this->add_neuron(new NeuronLinear,
BIAS_TYPE::NO_BIAS);
current_layer_neuron_indices_local.emplace_back(last_neuron_id);
/* Connect new neuron with all neurons from the previous layer */
for (auto ind : previous_layer_neuron_indices_local) {
this->add_connection_simple(ind,
last_neuron_id,
l4n::SIMPLE_CONNECTION_TYPE::NEXT_WEIGHT);
}
/* connection towards the output */
this->add_connection_constant(last_neuron_id,
output_layer_neuron_indices[oi],
xi[hi][0]);
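            /* The remaining copies of this transfer function reuse the biases and
             * connection weights created for the first copy (EXISTING_BIAS /
             * add_existing_connection), so all copies share parameters and differ
             * only in their biased input neuron and the output scaling xi[hi][k]. */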
for (size_t k = 1; k < xi[hi].size(); ++k) {
COUT_DEBUG(" CONSTRUCTING TRANSFER FUNCTION #" << (k + 1) << std::endl);
size_t first_bias_idx_local = first_bias_idx;
size_t first_connection_idx_local = first_connection_idx;
//for each timestep, we add a 'copy' of one neural network
current_layer_neuron_indices_local.clear();
input_layer_neuron_indices_local.clear();
previous_layer_neuron_indices_local.clear();
/* Creation of INPUT layer neurons */
current_layer_neuron_indices_local.reserve(inp_dim);
input_layer_neuron_indices_local.reserve(inp_dim);
first_neuron_id = bias_layer_neuron_indices[k];
input_layer_neuron_indices_local.emplace_back(first_neuron_id);
current_layer_neuron_indices_local = input_layer_neuron_indices_local;
/* Creation of HIDDEN layers */
for (unsigned int i = 1; i <= hidden_net_structure.size() - 2; i++) {
previous_layer_neuron_indices_local.reserve(hidden_net_structure.at(i - 1));
previous_layer_neuron_indices_local = current_layer_neuron_indices_local;
current_layer_neuron_indices_local.clear();
current_layer_neuron_indices_local.reserve(hidden_net_structure.at(i));
/* Creation of one single hidden layer */
for (unsigned int j = 0; j < hidden_net_structure.at(i); j++) {
/* Create a new hidden neuron */
size_t neuron_id_local = this->add_neuron(new NeuronLogistic,
BIAS_TYPE::EXISTING_BIAS,
first_bias_idx_local);
first_bias_idx_local++;
current_layer_neuron_indices_local.emplace_back(neuron_id_local);
/* Connect the new neuron with all neurons from the previous layer */
for (auto ind : previous_layer_neuron_indices_local) {
this->add_existing_connection(ind,
neuron_id_local,
first_connection_idx_local,
*this);
first_connection_idx_local++;
}
}
}
previous_layer_neuron_indices_local.reserve(hidden_net_structure.back() - 1);
previous_layer_neuron_indices_local = current_layer_neuron_indices_local;
current_layer_neuron_indices_local.clear();
current_layer_neuron_indices_local.reserve(1);
/* Creation of OUTPUT layer neurons */
size_t last_neuron_id_local = this->add_neuron(new NeuronLinear,
BIAS_TYPE::NO_BIAS);
current_layer_neuron_indices_local.emplace_back(last_neuron_id_local);
/* Connect new neuron with all neurons from the previous layer */
for (auto ind : previous_layer_neuron_indices_local) {
this->add_existing_connection(ind,
last_neuron_id_local,
first_connection_idx_local,
*this);
first_connection_idx_local++;
}
/* connection towards the output */
this->add_connection_constant(last_neuron_id_local,
output_layer_neuron_indices[oi],
xi[hi][k]);
}
}
this->index_end_output_connections_inclusive.push_back(this->connection_list->size() - 1);
}
    /* Init variables containing indices of INPUT and OUTPUT neurons
     * (reuse the vectors already allocated above instead of re-allocating them) */
    *this->input_neuron_indices  = input_layer_neuron_indices;
    *this->output_neuron_indices = output_layer_neuron_indices;
this->analyze_layer_structure();
}
size_t Simulator::get_n_valves() {
return this->n_valves;
}
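    /* Evaluates the whole network (all valves) at time t; the outputs are written
     * into 'result' by eval_single. */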
double Simulator::eval_model(double t,
std::vector<double>& result) {
std::vector<double> input = {t};
std::fill(result.begin(),
result.end(),
0.0);
this->eval_single(input,
result);
        return 0.0; /* placeholder return value; the computed outputs are written into 'result' */
    }
    double Simulator::eval_model(double t,
                                 size_t valve_idx) {
        /* TODO: not implemented yet (declared with a TODO in the header) */
        return 0.0;
    }
double Simulator::eval_model(double t,
size_t valve_idx,
std::vector<double>& result) {
if ((this->input_neuron_indices->size() * this->output_neuron_indices->size()) <= 0) {
THROW_INVALID_ARGUMENT_ERROR("Input and output neurons have not been specified!");
}
if (this->input_neuron_indices->size() != 1) {
THROW_INVALID_ARGUMENT_ERROR("Data input size != Network input size");
}
if (this->output_neuron_indices->size() != result.size()) {
THROW_INVALID_ARGUMENT_ERROR("Data output size != Network output size");
}
this->analyze_layer_structure();
std::vector<double> input = {t};
std::fill(result.begin(),
result.end(),
0.0);
//TODO optimize... right now it just ignores the connections to be disregarded towards all the output
double potential, bias;
int bias_idx;
/* reset of the output and the neuron potentials */
::std::fill(this->neuron_potentials->begin(),
this->neuron_potentials->end(),
0.0);
/* set the potentials of the input neurons */
for (size_t i = 0; i < this->input_neuron_indices->size(); ++i) {
this->neuron_potentials->at(this->input_neuron_indices->at(i)) = input[i];
}
/* we iterate through SOME of the connections and transfer the signals */
for (auto layer: *this->neuron_layers_feedforward) {
/* we iterate through all neurons in this layer and propagate the signal to the neighboring neurons */
for (auto si: *layer) {
bias = 0.0;
bias_idx = this->neuron_bias_indices->at(si);
if (bias_idx >= 0) {
bias = this->neuron_biases->at(bias_idx);
}
potential = this->neurons->at(si)->activate(this->neuron_potentials->at(si),
bias);
for (auto c: *this->outward_adjacency->at(si)) {
size_t ti = c.first;
size_t ci = c.second;
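                    /* propagate the signal only along connections that belong to the
                     * requested valve (the index range recorded by the constructor) */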
if (ci >= this->index_start_output_connections_inclusive[valve_idx] &&
ci <= this->index_end_output_connections_inclusive[valve_idx]) {
this->neuron_potentials->at(ti) +=
this->connection_list->at(ci)->eval(*this->connection_weights) * potential;
}
}
}
}
unsigned int i = 0;
for (auto oi: *this->output_neuron_indices) {
bias = 0.0;
bias_idx = this->neuron_bias_indices->at(oi);
if (bias_idx >= 0) {
bias = this->neuron_biases->at(bias_idx);
}
result[i] = this->neurons->at(oi)->activate(this->neuron_potentials->at(oi),
bias);
++i;
}
        return 0.0; /* placeholder return value; the computed outputs are written into 'result' */
    }
}//end of namespace lib4neuro
/**
* DESCRIPTION OF THE FILE
*
* @author Michal Kravčenko
* @date 11.3.19 -
*/
#ifndef LIB4NEURO_SIMULATOR_H
#define LIB4NEURO_SIMULATOR_H
#include <iostream>
#include <4neuro.h>
#include "../message.h"
#include "../settings.h"
#include "../General/ExprtkWrapper.h"
#include "../Network/NeuralNetwork.h"
namespace lib4neuro {
class Simulator : public NeuralNetwork {
private:
size_t n_valves;
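        /* per-valve inclusive [start, end] ranges of connection indices created by the
         * constructor; used by eval_model(t, valve_idx, result) to restrict the
         * forward pass to a single valve */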
std::vector<size_t> index_start_output_connections_inclusive;
std::vector<size_t> index_end_output_connections_inclusive;
public:
LIB4NEURO_API explicit Simulator(
size_t n_outputs,
std::vector<size_t>& hidden_net_structure,
std::vector<std::vector<double>>& t,
std::vector<std::vector<double>>& xi
);
LIB4NEURO_API size_t get_n_valves();
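        /* evaluates the whole network at time t and writes the outputs into 'result' */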
LIB4NEURO_API double eval_model(double t,
std::vector<double>& result);
LIB4NEURO_API double eval_model(double t,
size_t valve_idx,
std::vector<double>& result);
//TODO
LIB4NEURO_API double eval_model(double t,
size_t valve_idx);
};
}//end of namespace lib4neuro
#endif //LIB4NEURO_SIMULATOR_H