Commit bbdcff15 authored by Michal Kravcenko's avatar Michal Kravcenko

-added support for multi-network systems sharing edge weights

-added an example testing multi-network error function
parent dfaf4073
......@@ -41,4 +41,31 @@ double MSE::eval(double *weights) {
}
return error/n_elements;
}
/**
 * Creates an empty error-function sum.
 * The container of summands is allocated on demand by add_error_function().
 */
MSE_SUM::MSE_SUM() : summand(nullptr) {
}
/**
 * Destructor, releases the container of summands.
 * NOTE(review): the pointed-to ErrorFunction objects are NOT deleted here —
 * confirm that ownership of the summands stays with the caller.
 */
MSE_SUM::~MSE_SUM() {
    delete this->summand;  // deleting a nullptr is a no-op
    this->summand = nullptr;
}
double MSE_SUM::eval(double *weights) {
double output = 0.0;
for(ErrorFunction *f: *this->summand){
output += f->eval( weights );
}
return output;
}
/**
 * Registers another error function to be included in the sum.
 * The storage vector is created lazily on the first call.
 * @param F error function to add (not owned by this object)
 */
void MSE_SUM::add_error_function(ErrorFunction *F) {
    if (this->summand == nullptr) {
        this->summand = new std::vector<ErrorFunction*>();
    }
    this->summand->push_back(F);
}
\ No newline at end of file
......@@ -42,12 +42,6 @@ public:
*/
MSE(NeuralNetwork* net, DataSet* ds);
/**
* Constructor for multiple error functions, which will get summed up
* @param func_vec
*/
//MSE(std::vector<ErrorFunction> func_vec);
/**
*
* @param weights
......@@ -61,5 +55,34 @@ private:
DataSet* ds;
};
/**
 * Error function representing the sum of several ErrorFunction instances,
 * e.g. the combined error of multiple networks sharing edge weights.
 *
 * Inherits publicly so an MSE_SUM can itself be used wherever an
 * ErrorFunction* is expected (the original private inheritance blocked
 * that conversion, preventing e.g. nesting of sums).
 */
class MSE_SUM : public ErrorFunction {
public:
    /**
     * Constructs an empty sum; summands are added via add_error_function().
     */
    MSE_SUM();

    /**
     * Destructor, releases the container of summands (not the summand
     * objects themselves). Virtual so deletion through a base pointer
     * is safe.
     */
    virtual ~MSE_SUM();

    /**
     * Evaluates the total error as the sum of all registered error
     * functions at the given weights.
     * @param weights weight vector forwarded to each summand
     * @return sum of the individual error values
     */
    virtual double eval(double* weights);

    /**
     * Adds another error function to the sum.
     * @param F error function to include; ownership is not taken
     */
    void add_error_function(ErrorFunction *F);

private:
    /**
     * Lazily allocated list of the error functions being summed.
     */
    std::vector<ErrorFunction*> *summand;
};
#endif //INC_4NEURO_ERRORFUNCTION_H
/**
* DESCRIPTION OF THE FILE
*
* @author Michal Kravčenko
* @date 4.7.18 -
*/
#include "FunctionBase.h"
// Default constructor; the cached 'derivative' pointer starts as nullptr
// via its in-class initializer.
FunctionBase::FunctionBase() {}
/**
 * Destructor. Releases the lazily created derivative object cached by
 * get_derivative() implementations (see FunctionCosine), which the
 * original version leaked. Deleting it recursively frees any chain of
 * higher derivatives.
 * NOTE(review): assumes callers of get_derivative() never take ownership
 * of the returned pointer — confirm at call sites.
 */
FunctionBase::~FunctionBase() {
    delete this->derivative;  // no-op when nullptr
    this->derivative = nullptr;
}
/**
* DESCRIPTION OF THE FILE
*
* @author Michal Kravčenko
* @date 4.7.18 -
*/
#ifndef INC_4NEURO_FUNCTIONBASE_H
#define INC_4NEURO_FUNCTIONBASE_H
/**
 * Abstract base class for scalar real functions that can provide their
 * own derivative as another FunctionBase instance.
 */
class FunctionBase {
public:
    /**
     * Default constructor.
     */
    FunctionBase();

    /**
     * Virtual destructor so derived function objects are destroyed
     * correctly through a FunctionBase pointer.
     */
    virtual ~FunctionBase();

    /**
     * Evaluates the function at point x.
     * @param x point of evaluation
     * @return function value at x
     */
    virtual double eval(double x) = 0;

    /**
     * Returns the derivative of this function.
     * NOTE(review): implementations cache the result in 'derivative'
     * (see FunctionCosine), so the returned pointer appears to be owned
     * by this object — confirm callers must not delete it.
     * @return pointer to the derivative function
     */
    virtual FunctionBase* get_derivative() = 0;

protected:
    // Cached derivative object, allocated on the first get_derivative() call.
    FunctionBase* derivative = nullptr;

private:
};
#endif //INC_4NEURO_FUNCTIONBASE_H
/**
* DESCRIPTION OF THE FILE
*
* @author Michal Kravčenko
* @date 4.7.18 -
*/
#include "FunctionCosine.h"
/**
 * Constructs a cosine function of the form cos(period * x + bias).
 * @param period multiplicative coefficient applied to the argument
 * @param bias additive shift of the argument
 */
FunctionCosine::FunctionCosine(double period, double bias)
        : period(period), bias(bias) {
}
/**
 * Evaluates cos(period * x + bias).
 * @param x point of evaluation
 * @return function value at x
 */
double FunctionCosine::eval(double x) {
    const double argument = this->period * x + this->bias;
    return std::cos(argument);
}
// Lazily constructs and caches the derivative object, then returns it.
// BUG(review): d/dx cos(period*x + bias) = -period * sin(period*x + bias),
// but FunctionSine is constructed with no parameters, so the period/bias
// and the negative sign (and the chain-rule factor) are lost. FunctionSine
// is also currently an empty class that never implements the pure-virtual
// eval()/get_derivative(), so it is abstract and cannot be instantiated —
// this needs a proper FunctionSine implementation before it can work.
FunctionBase* FunctionCosine::get_derivative() {
    if(!this->derivative){
        this->derivative = new FunctionSine();
    }
    return this->derivative;
}
\ No newline at end of file
/**
* DESCRIPTION OF THE FILE
*
* @author Michal Kravčenko
* @date 4.7.18 -
*/
#ifndef INC_4NEURO_FUNCTIONCOSINE_H
#define INC_4NEURO_FUNCTIONCOSINE_H
#include <cmath>
#include "FunctionBase.h"
#include "FunctionSine.h"
class FunctionCosine: FunctionBase {
public:
FunctionCosine(double period, double bias);
double eval(double x);
FunctionBase* get_derivative();
private:
double period = 0.0;
double bias = 0.0;
};
#endif //INC_4NEURO_FUNCTIONCOSINE_H
/**
* DESCRIPTION OF THE FILE
*
* @author Michal Kravčenko
* @date 4.7.18 -
*/
#include "FunctionExponential.h"
/**
* DESCRIPTION OF THE FILE
*
* @author Michal Kravčenko
* @date 4.7.18 -
*/
#ifndef INC_4NEURO_FUNCTIONEXPONENTIAL_H
#define INC_4NEURO_FUNCTIONEXPONENTIAL_H
#include "FunctionBase.h"
// Placeholder for an exponential function type.
// NOTE(review): inherits privately (class default) and does not implement
// the pure-virtual eval()/get_derivative() from FunctionBase, so it is
// abstract and cannot be instantiated yet; likely intended as
// ': public FunctionBase' with a real implementation.
class FunctionExponential: FunctionBase {
};
#endif //INC_4NEURO_FUNCTIONEXPONENTIAL_H
/**
* DESCRIPTION OF THE FILE
*
* @author Michal Kravčenko
* @date 4.7.18 -
*/
#include "FunctionLogarithm.h"
/**
* DESCRIPTION OF THE FILE
*
* @author Michal Kravčenko
* @date 4.7.18 -
*/
#ifndef INC_4NEURO_FUNCTIONLOGARITHM_H
#define INC_4NEURO_FUNCTIONLOGARITHM_H
#include "FunctionBase.h"
// Placeholder for a logarithm function type.
// NOTE(review): inherits privately (class default) and leaves the
// pure-virtual eval()/get_derivative() unimplemented, so the class is
// abstract until a real implementation is added.
class FunctionLogarithm: FunctionBase {
};
#endif //INC_4NEURO_FUNCTIONLOGARITHM_H
/**
* DESCRIPTION OF THE FILE
*
* @author Michal Kravčenko
* @date 4.7.18 -
*/
#include "FunctionPolynomial.h"
/**
* DESCRIPTION OF THE FILE
*
* @author Michal Kravčenko
* @date 4.7.18 -
*/
#ifndef INC_4NEURO_FUNCTIONPOLYNOMIAL_H
#define INC_4NEURO_FUNCTIONPOLYNOMIAL_H
#include "FunctionBase.h"
// Placeholder for a polynomial function type.
// NOTE(review): inherits privately (class default) and leaves the
// pure-virtual eval()/get_derivative() unimplemented, so the class is
// abstract until a real implementation is added.
class FunctionPolynomial: FunctionBase {
};
#endif //INC_4NEURO_FUNCTIONPOLYNOMIAL_H
/**
* DESCRIPTION OF THE FILE
*
* @author Michal Kravčenko
* @date 4.7.18 -
*/
#include "FunctionSine.h"
/**
* DESCRIPTION OF THE FILE
*
* @author Michal Kravčenko
* @date 4.7.18 -
*/
#ifndef INC_4NEURO_FUNCTIONSINE_H
#define INC_4NEURO_FUNCTIONSINE_H
#include "FunctionBase.h"
// Placeholder for a sine function type.
// NOTE(review): inherits privately (class default) and leaves the
// pure-virtual eval()/get_derivative() unimplemented, so the class is
// abstract — yet FunctionCosine::get_derivative() does
// 'new FunctionSine()', which cannot compile until this class gets a
// real implementation (ideally parameterized like FunctionCosine).
class FunctionSine: FunctionBase {
};
#endif //INC_4NEURO_FUNCTIONSINE_H
/**
* DESCRIPTION OF THE FILE
*
* @author Michal Kravčenko
* @date 4.7.18 -
*/
#include "Graph.h"
/**
* DESCRIPTION OF THE FILE
*
* @author Michal Kravčenko
* @date 4.7.18 -
*/
#ifndef INC_4NEURO_GRAPH_H
#define INC_4NEURO_GRAPH_H
//TODO verification of decision making (for each input determine the magnitude of its influence over the output)
//TODO form a graph of the inverse function
// Placeholder for a computation-graph representation; see the TODO notes
// above for the planned responsibilities (influence analysis, inverse
// function graph).
class Graph {
};
#endif //INC_4NEURO_GRAPH_H
/**
* DESCRIPTION OF THE FILE
*
* @author Michal Kravčenko
* @date 4.7.18 -
*/
#include "VertexBase.h"
/**
* DESCRIPTION OF THE FILE
*
* @author Michal Kravčenko
* @date 4.7.18 -
*/
#ifndef INC_4NEURO_VERTEXBASE_H
#define INC_4NEURO_VERTEXBASE_H
// Placeholder base class for graph vertices; no interface defined yet.
class VertexBase {
};
#endif //INC_4NEURO_VERTEXBASE_H
/**
* DESCRIPTION OF THE FILE
*
* @author Michal Kravčenko
* @date 4.7.18 -
*/
#include "VertexCompound.h"
/**
* DESCRIPTION OF THE FILE
*
* @author Michal Kravčenko
* @date 4.7.18 -
*/
#ifndef INC_4NEURO_VERTEXCOMPOUND_H
#define INC_4NEURO_VERTEXCOMPOUND_H
// Placeholder for a compound graph vertex; no interface defined yet.
// NOTE(review): presumably meant to derive from VertexBase — confirm.
class VertexCompound {
};
#endif //INC_4NEURO_VERTEXCOMPOUND_H
/**
* DESCRIPTION OF THE FILE
*
* @author Michal Kravčenko
* @date 4.7.18 -
*/
#include "VertexConstant.h"
/**
* DESCRIPTION OF THE FILE
*
* @author Michal Kravčenko
* @date 4.7.18 -
*/
#ifndef INC_4NEURO_VERTEXCONSTANT_H
#define INC_4NEURO_VERTEXCONSTANT_H
// Placeholder for a constant-valued graph vertex; no interface defined yet.
// NOTE(review): presumably meant to derive from VertexBase — confirm.
class VertexConstant {
};
#endif //INC_4NEURO_VERTEXCONSTANT_H
/**
* DESCRIPTION OF THE FILE
*
* @author Michal Kravčenko
* @date 4.7.18 -
*/
#include "VertexSum.h"
/**
* DESCRIPTION OF THE FILE
*
* @author Michal Kravčenko
* @date 4.7.18 -
*/
#ifndef INC_4NEURO_VERTEXSUM_H
#define INC_4NEURO_VERTEXSUM_H
// Placeholder for a summation graph vertex; no interface defined yet.
// NOTE(review): presumably meant to derive from VertexBase — confirm.
class VertexSum {
};
#endif //INC_4NEURO_VERTEXSUM_H
......@@ -7,15 +7,21 @@
#include "Connection.h"
Connection::Connection(Neuron *n_in, Neuron *n_out, ConnectionWeight* con) {
/**
 * Creates a new Connection between the given neurons that shares this
 * connection's weight object. The copy is constructed with
 * del_weight == false, so it will not delete the shared ConnectionWeight.
 * @param n_in neuron attached to the 'in' side of the new connection
 * @param n_out neuron attached to the 'out' side of the new connection
 * @return pointer to the newly allocated Connection (caller owns it)
 */
Connection* Connection::get_copy(Neuron *n_in, Neuron *n_out){
    return new Connection(n_in, n_out, this->con, false);
}
/**
 * Constructor.
 * @param n_in neuron stored on the 'in' side of this connection
 * @param n_out neuron stored on the 'out' side of this connection
 * @param con object evaluating the weight function of this connection
 * @param del_weight when true, the destructor also deletes 'con'
 *        (set false for copies sharing a weight, see get_copy())
 */
Connection::Connection(Neuron *n_in, Neuron *n_out, ConnectionWeight* con, bool del_weight) {
    neuron_in = n_in;
    neuron_out = n_out;
    this->con = con;  // 'this->' needed: parameter shadows the member
    delete_connection = del_weight;
}
Connection::~Connection() {
if(this->con){
if(this->con && this->delete_connection ){
delete this->con;
this->con = nullptr;
}
......
......@@ -34,18 +34,32 @@ private:
*/
Neuron *neuron_out = nullptr;
/**
*
*/
bool delete_connection = true;
//TODO pridat gradient pro ucely backpropagation
public:
/**
* Returns a new object representing an edge using the same weight function as this one
* @param n_in
* @param n_out
* @return
*/
virtual Connection* get_copy(Neuron *n_in, Neuron *n_out) final;
/**
* Constructor
* @param[in] n_in Pointer to the Neuron on the receiving end of this connection
* @param[in] n_out Pointer to the Neuron on the signaling end of this connection
* @param[in] con Pointer to an object evaluating the weight function
*
* @param n_in
* @param n_out
* @param con
* @param delete_weight
*/
Connection(Neuron *n_in, Neuron *n_out, ConnectionWeight* con);
Connection(Neuron *n_in, Neuron *n_out, ConnectionWeight* con, bool delete_weight = true);
/**
* Destructor, deletes the 'con' object
......
......@@ -58,7 +58,7 @@ public:
/**
*
*/
~ConnectionWeight();
virtual ~ConnectionWeight();
/**
*
......
This diff is collapsed.
......@@ -64,6 +64,11 @@ private:
*/
bool in_out_determined = false;
/**
*
*/
bool delete_weights = true;
/**
*
*/
......@@ -74,6 +79,12 @@ private:
*/
void determine_inputs_outputs();
/**
*
* @param weight_ptr
*/
void set_weight_array( std::vector<double>* weight_ptr );
public:
/**
......@@ -81,11 +92,22 @@ public:
*/
NeuralNetwork();
/**
*
*/
~NeuralNetwork();
/**
* If possible, returns a neural net with 'input_neuron_indices' neurons as inputs and 'output_neuron_indices' as
* outputs, otherwise returns nullptr. The returned object shares adjustable weights with this network. All
* neurons are copied (new instances), edges as well. Uses a breadth-first search as the underlying algorithm.
* @param input_neuron_indices
* @param output_neuron_indices
* @return
*/
NeuralNetwork* get_subnet(std::vector<size_t> &input_neuron_indices, std::vector<size_t> &output_neuron_indices);
/**
*
* @param[in] input
......@@ -115,7 +137,7 @@ public:
* @param n2_idx
* @param weight_idx
*/
void add_connection_simple(int n1_idx, int n2_idx, int weight_idx);
void add_connection_simple(int n1_idx, int n2_idx, size_t weight_idx);
/**
*
......@@ -124,7 +146,7 @@ public:
* @param[in] weight_idx
* @param[in] weight_value
*/
void add_connection_simple(int n1_idx, int n2_idx, int weight_idx, double weight_value);
void add_connection_simple(int n1_idx, int n2_idx, size_t weight_idx, double weight_value);
/**
*
......@@ -136,7 +158,7 @@ public:
* @param n_weights
*/
void add_connection_general(int n1_idx, int n2_idx, std::function<double(double *, int*, int)> *f,
int* weight_indices, double* weight_values, int n_weights);
int* weight_indices, double* weight_values, size_t n_weights);
/**
*
......@@ -167,6 +189,18 @@ public:
*/
size_t get_n_weights();
/**
*
* @param input_neurons_indices
*/
void specify_input_neurons(std::vector<size_t> &input_neurons_indices);
/**
*
* @param output_neurons_indices
*/
void specify_output_neurons(std::vector<size_t> &output_neurons_indices);
};
......
......@@ -116,6 +116,14 @@ std::vector<Connection*>* Neuron::get_connections_out() {
return this->edges_out;
}
size_t Neuron::get_idx() {
return this->neural_net_index;
}
void Neuron::set_idx(size_t value) {
this->neural_net_index = value;
}
//template<class Archive>
//void Neuron::serialize(Archive & ar, const unsigned int version) {
// ar << this->potential;
......
......@@ -53,6 +53,11 @@ protected:
*/
unsigned int n_saturated_connections_out = 0;
/**
* Index of this neuron among the neurons in the neural network
*/
size_t neural_net_index;
/**
* A pointer to a vector containing pointers to incoming connections
*/
......@@ -76,6 +81,12 @@ protected:
public:
/**
* Instantiates a copy of this object and returns a pointer to it
* @return
*/
virtual Neuron* get_copy( ) = 0;
/**
* Destructor of the Neuron object
* this level deallocates the array 'activation_function_parameters'
......@@ -206,6 +217,17 @@ public:
*/
virtual std::vector<Connection*>* get_connections_out( ) final;
/**
*
* @return
*/
size_t get_idx();
/**
*
* @param value
*/
void set_idx(size_t value);
}; /* end of Neuron class */
......
......@@ -4,6 +4,12 @@
#include "NeuronBinary.h"
/**
 * Instantiates a new NeuronBinary configured with the same threshold
 * parameter as this one.
 * @return pointer to the newly allocated copy (caller owns it)
 */
Neuron* NeuronBinary::get_copy( ){
    return new NeuronBinary(this->activation_function_parameters[0]);
}
NeuronBinary::NeuronBinary(double threshold) {
this->n_activation_function_parameters = 2;
......
......@@ -27,6 +27,8 @@ protected:
public:
Neuron* get_copy( );
/**
* Default constructor for the binary Neuron
* @param[in] threshold Denotes when the neuron is activated
......
......@@ -5,6 +5,13 @@
#include <boost/serialization/base_object.hpp>
#include "NeuronLinear.h"
/**
 * Instantiates a new NeuronLinear configured with the same activation
 * parameters (a, b) as this one.
 * @return pointer to the newly allocated copy (caller owns it)
 */
Neuron* NeuronLinear::get_copy( ){
    return new NeuronLinear(this->activation_function_parameters[0],
                            this->activation_function_parameters[1]);
}
NeuronLinear::NeuronLinear(double a, double b) {
this->n_activation_function_parameters = 2;
......
......@@ -28,6 +28,8 @@ protected:
public:
Neuron* get_copy( );
/**
* Constructs the object of the Linear neuron with activation function
* f(x) = b * x + a
......
......@@ -5,6 +5,12 @@
#include "NeuronLogistic.h"
/**
 * Instantiates a new NeuronLogistic configured with the same activation
 * parameters (a, b) as this one.
 * @return pointer to the newly allocated copy (caller owns it)
 */
Neuron* NeuronLogistic::get_copy( ){
    return new NeuronLogistic(this->activation_function_parameters[0],
                              this->activation_function_parameters[1]);
}
NeuronLogistic::NeuronLogistic(double a, double b) {
this->n_activation_function_parameters = 2;
......