Commit a3c47005 authored by Michal Kravcenko

- cleaned up the way neural nets are initialized and added some docs

parent 406953b0
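
As a quick orientation before the diff: a minimal sketch of the cleaned-up initialization flow, assembled from the interfaces changed below (add_neuron now returns the neuron's index, and connections are registered through the network rather than wired by hand). Header names, include paths, and the main() wrapper are assumptions; the NeuronLinear arguments follow the test at the end of the diff.

```cpp
#include <vector>
#include "NeuralNetwork.h"        // include paths are assumptions
#include "Neuron/NeuronLinear.h"

int main() {
    NeuralNetwork net;
    NeuronLinear *u1 = new NeuronLinear(1.0, 1.0);  // f(x) = x + 1.0
    NeuronLinear *u2 = new NeuronLinear(0.0, 1.0);  // f(x) = x

    // add_neuron() now returns the index under which the neuron is stored
    int idx1 = net.add_neuron(u1);
    int idx2 = net.add_neuron(u2);

    // weight_idx < 0 registers a new connection weight with value 1.0
    net.add_connection_simple(idx1, idx2, -1, 1.0);

    std::vector<double> in(1), out(1);
    in[0] = 0.5;
    net.eval_single(in, out);   // expected: out[0] == in[0] + 1.0

    delete u1;                  // the network does not own its neurons yet
    delete u2;
    return 0;
}
```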
@@ -5,7 +5,7 @@
<usages-collector id="statistics.file.extensions.open">
<counts>
<entry key="Makefile" value="1" />
<entry key="cpp" value="58" />
<entry key="cpp" value="59" />
<entry key="f90" value="4" />
<entry key="h" value="71" />
<entry key="txt" value="7" />
@@ -14,7 +14,7 @@
<usages-collector id="statistics.file.types.open">
<counts>
<entry key="CMakeLists.txt" value="7" />
<entry key="ObjectiveC" value="129" />
<entry key="ObjectiveC" value="130" />
<entry key="PLAIN_TEXT" value="5" />
</counts>
</usages-collector>
......
@@ -14,9 +14,12 @@ Connection::Connection(Neuron *n_in, Neuron *n_out, ConnectionWeight* con) {
this->con = con;
}
//Connection::~Connection() {
//
//}
Connection::~Connection() {
if(this->con){
delete this->con;
this->con = nullptr;
}
}
void Connection::adjust_weights(double* values) {
this->con->adjust_weights(values);
......
@@ -15,77 +15,73 @@ class Neuron;
class ConnectionWeight;
/**
*
* Class representing directed connection between two neurons
*/
class Connection {
private:
/**
*
* Pointer to an object evaluating the weight function
*/
ConnectionWeight *con = nullptr;
/**
*
* Pointer to the Neuron on the receiving end of this connection
*/
Neuron *neuron_in = nullptr;
/**
*
* Pointer to the Neuron on the signaling end of this connection
*/
Neuron *neuron_out = nullptr;
//TODO add gradient
//TODO add gradient for back-propagation purposes
public:
/**
*
* @param[in] n_in
* @param[in] n_out
*/
/**
* Constructor
* @param[in] n_in Pointer to the Neuron on the receiving end of this connection
* @param[in] n_out Pointer to the Neuron on the signaling end of this connection
* @param[in] con Pointer to an object evaluating the weight function
*/
Connection(Neuron *n_in, Neuron *n_out, ConnectionWeight* con);
// Connection(Neuron *n_in, Neuron *n_out, Connection* ref_con);
/**
*
* Destructor, deletes the 'con' object
*/
~Connection()=default;
~Connection();
/**
*
* @param[in] values
* Takes the array of double values and alters the corresponding weights associated
* with the 'con' object
* @param[in] values Values to be added to the associated weights
*/
void adjust_weights(double *values);
/**
*
* @param[in] values
* Takes the array of double values and sets the corresponding weights associated
* with the 'con' object
* @param[in] values Values to be set to the associated weights
*/
void set_weights(double *values);
// /**
// *
// * @return
// */
// double get_weight();
/**
*
* Takes the output signal of Neuron 'neuron_out', multiplies it by the result of the
* weight function associated with this connection and adds the result to the potential
* of the Neuron 'neuron_in'
*/
void pass_signal();
/**
*
* Returns the pointer to the Neuron on the receiving end of this connection
* @return
*/
Neuron* get_neuron_in();
/**
*
* Returns the pointer to the Neuron on the signaling end of this connection
* @return
*/
Neuron* get_neuron_out();
......
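
Since ~Connection() now frees the 'con' object, the weight passed to a Connection is expected to be heap-allocated and is owned by the connection from then on. A small sketch of that ownership, assuming the ConnectionWeightIdentity and NeuronLinear interfaces that appear elsewhere in this diff (include paths are assumptions):

```cpp
#include "NetConnection/Connection.h"               // include paths are assumptions
#include "NetConnection/ConnectionWeightIdentity.h"
#include "Neuron/NeuronLinear.h"

void connection_ownership_sketch() {
    NeuronLinear *u1 = new NeuronLinear(1.0, 1.0);
    NeuronLinear *u2 = new NeuronLinear(0.0, 1.0);
    double w = 1.0;                                 // storage the identity weight reads

    ConnectionWeightIdentity *cw = new ConnectionWeightIdentity();
    cw->SetParamPointer(&w, 0);

    Connection *edge = new Connection(u1, u2, cw);  // 'edge' now owns 'cw'
    u1->add_connection_out(edge);
    u2->add_connection_in(edge);

    // ~Connection() deletes 'cw'; with the Neuron change further below,
    // ~Neuron() also deletes its outgoing connections, so releasing the
    // neurons releases the edge and its weight as well.
    delete u1;
    delete u2;
}
```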
@@ -12,7 +12,7 @@ ConnectionWeight::ConnectionWeight() {
}
ConnectionWeight::ConnectionWeight(int param_count, std::function<double(double **, int)> f) {
ConnectionWeight::ConnectionWeight(int param_count, std::function<double(double **, int)> *f) {
this->param_ptrs = new double*[param_count];
this->n_params = param_count;
@@ -49,5 +49,5 @@ void ConnectionWeight::SetParamPointer(double *param_ptr, int idx) {
}
double ConnectionWeight::eval() {
return this->weight_function(this->param_ptrs, this->n_params);
return (*this->weight_function)(this->param_ptrs, this->n_params);
}
\ No newline at end of file
@@ -25,7 +25,7 @@ protected:
/**
*
*/
std::function<double(double **, int)> weight_function;
std::function<double(double **, int)> *weight_function = nullptr;
public:
@@ -39,7 +39,7 @@ public:
* @param param_count
* @param f
*/
ConnectionWeight(int param_count, std::function<double(double **, int)> f);
ConnectionWeight(int param_count, std::function<double(double **, int)> *f);
/**
*
......
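
The weight function is now taken and stored by pointer, so eval() dereferences it and the std::function object has to outlive the ConnectionWeight that refers to it. A hedged sketch of that contract, using an illustrative product weight (the constructor, SetParamPointer and eval signatures are the ones shown above; the include path is an assumption):

```cpp
#include <functional>
#include "NetConnection/ConnectionWeight.h"   // include path is an assumption

double product_weight_sketch() {
    // illustrative weight function: w(p0, p1) = p0 * p1
    std::function<double(double **, int)> product =
            [](double **params, int /*n_params*/) { return (*params[0]) * (*params[1]); };

    double a = 2.0, b = 3.0;

    // only the address of 'product' is stored, so 'product' must stay alive
    // for as long as the ConnectionWeight is evaluated
    ConnectionWeight cw(2, &product);
    cw.SetParamPointer(&a, 0);
    cw.SetParamPointer(&b, 1);

    return cw.eval();   // (*weight_function)(param_ptrs, 2) == 6.0
}
```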
@@ -19,8 +19,15 @@ class ConnectionWeightIdentity:public ConnectionWeight {
private:
public:
/**
*
*/
ConnectionWeightIdentity();
/**
*
* @return
*/
double eval() override;
};
......
@@ -6,9 +6,11 @@
*/
#include "NeuralNetwork.h"
#include "../NetConnection/ConnectionWeightIdentity.h"
NeuralNetwork::NeuralNetwork() {
this->neurons = new std::vector<Neuron*>(0);
this->connection_weights = new std::vector<double>(0);
}
NeuralNetwork::~NeuralNetwork() {
@@ -28,11 +30,63 @@ NeuralNetwork::~NeuralNetwork() {
delete this->active_eval_set;
this->active_eval_set = nullptr;
}
if(this->connection_weights){
delete this->connection_weights;
this->connection_weights = nullptr;
}
}
void NeuralNetwork::add_neuron(Neuron *n) {
int NeuralNetwork::add_neuron(Neuron *n) {
this->neurons->push_back(n);
this->in_out_determined = false;
return (int)this->neurons->size() - 1;
}
void NeuralNetwork::add_connection_simple(int n1_idx, int n2_idx, int weight_idx, double weight_value) {
if(weight_idx < 0 || weight_idx >= this->connection_weights->size()){
//this weight is a new one, we add it to the system of weights
this->connection_weights->push_back(weight_value);
weight_idx = (int)this->connection_weights->size() - 1;
}
Neuron *neuron_out = this->neurons->at(n1_idx);
Neuron *neuron_in = this->neurons->at(n2_idx);
ConnectionWeightIdentity *con_weight_u1u2 = new ConnectionWeightIdentity();
con_weight_u1u2->SetParamPointer(&this->connection_weights->at(weight_idx), 0);
Connection *u1u2 = new Connection(neuron_out, neuron_in, con_weight_u1u2);
neuron_out->add_connection_out(u1u2);
neuron_in->add_connection_in(u1u2);
}
void NeuralNetwork::add_connection_general(int n1_idx, int n2_idx, std::function<double(double **, int)> *f, int* weight_indices, double* weight_values, int n_weights) {
ConnectionWeight *con_weight_u1u2 = new ConnectionWeight(n_weights, f);
//we analyze weights
int weight_idx = 0;
double weight_value = 0.0;
for(int wi = 0; wi < n_weights; ++wi){
weight_idx = weight_indices[wi];
weight_value = weight_values[wi];
if(weight_idx < 0 || weight_idx >= this->connection_weights->size()){
//this weight is a new one, we add it to the system of weights
this->connection_weights->push_back(weight_value);
weight_indices[wi] = (int)this->connection_weights->size() - 1;
}
con_weight_u1u2->SetParamPointer(&this->connection_weights->at(weight_indices[wi]), wi);
}
Neuron *neuron_out = this->neurons->at(n1_idx);
Neuron *neuron_in = this->neurons->at(n2_idx);
Connection *u1u2 = new Connection(neuron_out, neuron_in, con_weight_u1u2);
neuron_out->add_connection_out(u1u2);
neuron_in->add_connection_in(u1u2);
}
void NeuralNetwork::determine_inputs_outputs() {
......
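
Both add_connection_* methods use the same weight-index convention: an index outside the current connection_weights vector appends a new weight initialized with the supplied value, while a valid index reuses an existing weight, which is how two edges can share one parameter. A usage sketch under that assumption; the literal index 0 works here only because the network starts with no weights, since add_connection_simple does not return the index of a newly created weight:

```cpp
void shared_weight_sketch() {
    NeuralNetwork net;
    NeuronLinear *u1 = new NeuronLinear(1.0, 1.0);
    NeuronLinear *u2 = new NeuronLinear(0.0, 1.0);
    NeuronLinear *u3 = new NeuronLinear(0.0, 1.0);
    int i1 = net.add_neuron(u1);
    int i2 = net.add_neuron(u2);
    int i3 = net.add_neuron(u3);

    // weight_idx = -1 is out of range, so a new weight with value 0.5 is
    // appended to connection_weights; on a fresh network it gets index 0
    net.add_connection_simple(i1, i2, -1, 0.5);

    // weight_idx = 0 reuses that weight, so both edges share a single parameter
    net.add_connection_simple(i1, i3, 0, 0.5);

    // as in the test below, the caller still owns the neurons
    delete u1;
    delete u2;
    delete u3;
}
```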
@@ -42,6 +42,8 @@ private:
*/
std::vector<Neuron*>* output_neurons = nullptr;
std::vector<double>* connection_weights = nullptr;
/**
*
*/
@@ -86,9 +88,30 @@ public:
/**
*
* @param n
* @param[in] n
* @return
*/
int add_neuron(Neuron* n);
/**
*
* @param[in] n1_idx
* @param[in] n2_idx
* @param[in] weight_idx
* @param[in] weight_value
*/
void add_connection_simple(int n1_idx, int n2_idx, int weight_idx, double weight_value);
/**
*
* @param n1_idx
* @param n2_idx
* @param f
* @param weight_indices
* @param weight_values
* @param n_weights
*/
void add_neuron(Neuron* n);
void add_connection_general(int n1_idx, int n2_idx, std::function<double(double **, int)> *f, int* weight_indices, double* weight_values, int n_weights);
......
@@ -8,6 +8,9 @@ Neuron::~Neuron() {
}
if(this->edges_out){
for(auto *edge: *this->edges_out){
delete edge;
}
delete this->edges_out;
this->edges_out = nullptr;
}
......
@@ -15,6 +15,7 @@
#include "NetConnection/ConnectionWeightIdentity.h"
//TODO rewrite so that the neural network manages the destructors of all necessary objects (because of serialization)
/**
* Test of simple neural network
* Network should evaluate the function f(x) = x + 1
@@ -22,28 +23,29 @@
void test1(){
std::vector<double> in(1);
std::vector<double> out(1);
NeuralNetwork net;
NeuronLinear u1(1.0, 1.0); //f(x) = x + 1.0
NeuronLinear u2(0.0, 1.0); //f(x) = x
auto * parameters = new double[1];
parameters[0] = 1.0;
// ConnectionWeight con_weight_u1u2(1, [](double ** params, int n_params){ return (*params[0]);});
ConnectionWeightIdentity con_weight_u1u2;
// con_weight_u1u2.SetParamPointer(&parameters[0], 0);
con_weight_u1u2.SetParamPointer(&parameters);
Connection u1u2(&u1, &u2, &con_weight_u1u2);
u1.add_connection_out(&u1u2);
u2.add_connection_in(&u1u2);
net.add_neuron(&u1);
net.add_neuron(&u2);
NeuralNetwork net;
NeuronLinear* u1 = new NeuronLinear(1.0, 1.0); //f(x) = x + 1.0
NeuronLinear* u2 = new NeuronLinear(0.0, 1.0); //f(x) = x
int idx1 = net.add_neuron(u1);
int idx2 = net.add_neuron(u2);
////////////////////// SIMPLE EDGE WEIGHT ////////////////////////////////////////
// net.add_connection_simple(idx1, idx2, -1, 1.0);
////////////////////// END SIMPLE EDGE WEIGHT ////////////////////////////////////////
/////////////////////////BEGIN OF COMPLEX EDGE WEIGHT//////////////////////////////
//TODO fix the memory leak
std::function<double(double **, int)> weight_function = [](double ** params, int n_params){
//w(x, y) = x + y
double a = (*(params[0]));
double b = (*(params[1]));
printf("eval: %f, %f\n", a, b);
return (a + 0.0 * b);
};
int weight_indices [2]= {0, -1};
double weight_values [2] = {1.0, 5.0};
net.add_connection_general(idx1, idx2, &weight_function, weight_indices, weight_values, 2);
/////////////////////////END OF COMPLEX EDGE WEIGHT//////////////////////////////
for(int i = 0; i < 20; ++i){
in[0] = 0.05 * i;
net.eval_single(in, out);
@@ -51,11 +53,9 @@ void test1(){
printf("x = %3.2f, f(x) = %3.2f, expected output = %3.2f\n", in[0], out[0], in[0] + 1.0);
}
delete [] parameters;
//clean-up phase
delete u1;
delete u2;
}
int main(int argc, char** argv){
......