diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt
index 665f63ed607f641597bce58a72336200a30a783a..1cdb7c1835b35b42ceefe1a8280d8cb3cd0e7a76 100644
--- a/src/CMakeLists.txt
+++ b/src/CMakeLists.txt
@@ -13,7 +13,7 @@ add_library(4neuro SHARED
         NetConnection/ConnectionWeightIdentity.cpp
         LearningMethods/ParticleSwarm.cpp
         DataSet/DataSet.cpp
-        ErrorFunction/ErrorFunctions.cpp Network/NeuralNetworkSum.cpp Network/NeuralNetworkSum.h Solvers/PDESolver.cpp Solvers/PDESolver.h Neuron/NeuronLogistic_d1.cpp Neuron/NeuronLogistic_d1.h Neuron/NeuronLogistic_d2.cpp Neuron/NeuronLogistic_d2.h)
+        ErrorFunction/ErrorFunctions.cpp Network/NeuralNetworkSum.cpp Network/NeuralNetworkSum.h Solvers/DESolver.cpp Solvers/DESolver.h)
 
 target_link_libraries(4neuro boost_serialization)
 
diff --git a/src/ErrorFunction/ErrorFunctions.cpp b/src/ErrorFunction/ErrorFunctions.cpp
index a98e0603295b882c81b5f4b2422ca179eec66a0e..1dc9d143959ebb2bfdd9f58efe6754204545e39e 100644
--- a/src/ErrorFunction/ErrorFunctions.cpp
+++ b/src/ErrorFunction/ErrorFunctions.cpp
@@ -19,7 +19,7 @@ MSE::MSE(NeuralNetwork *net, DataSet *ds) {
 
 double MSE::eval(double *weights) {
     unsigned int dim_out = this->ds->get_output_dim();
-    unsigned int dim_in = this->ds->get_input_dim();
+//    unsigned int dim_in = this->ds->get_input_dim();
     size_t n_elements = this->ds->get_n_elements();
     double error = 0.0, val;
 
@@ -51,13 +51,13 @@ double MSE::eval(double *weights) {
     return error/n_elements;
 }
 
-MSE_SUM::MSE_SUM() {
+ERROR_SUM::ERROR_SUM() {
     this->summand = nullptr;
     this->summand_coefficient = nullptr;
     this->dimension = 0;
 }
 
-MSE_SUM::~MSE_SUM(){
+ERROR_SUM::~ERROR_SUM(){
     if( this->summand ){
         delete this->summand;
     }
@@ -66,17 +66,17 @@ MSE_SUM::~MSE_SUM(){
     }
 }
 
-double MSE_SUM::eval(double *weights) {
+double ERROR_SUM::eval(double *weights) {
     double output = 0.0;
 
-    for( int i = 0; i < this->summand->size(); ++i ){
+    for( unsigned int i = 0; i < this->summand->size(); ++i ){
         output += this->summand->at( i )->eval( weights ) * this->summand_coefficient->at( i );
     }
 
     return output;
 }
 
-void MSE_SUM::add_error_function(ErrorFunction *F, double alpha) {
+void ERROR_SUM::add_error_function(ErrorFunction *F, double alpha) {
     if(!this->summand){
         this->summand = new std::vector<ErrorFunction*>(0);
     }
@@ -92,7 +92,7 @@ void MSE_SUM::add_error_function(ErrorFunction *F, double alpha) {
     }
 }
 
-size_t MSE_SUM::get_dimension() {
+size_t ERROR_SUM::get_dimension() {
 //    if(!this->dimension) {
 //        size_t max = 0;
 //        for(auto e : *this->summand) {
diff --git a/src/ErrorFunction/ErrorFunctions.h b/src/ErrorFunction/ErrorFunctions.h
index 48c0731e64c532111620bb7049ed3d9927a0a5ab..cae1d0ffcb7bf567a0945a7752bd39012bae4631 100644
--- a/src/ErrorFunction/ErrorFunctions.h
+++ b/src/ErrorFunction/ErrorFunctions.h
@@ -8,6 +8,10 @@
 #include "../Network/NeuralNetwork.h"
 #include "../DataSet/DataSet.h"
 
+enum ErrorFunctionType{
+    ErrorFuncMSE
+};
+
 class ErrorFunction {
 public:
 
@@ -55,17 +59,17 @@ private:
     DataSet* ds;
 };
 
-class MSE_SUM : public ErrorFunction{
+class ERROR_SUM : public ErrorFunction{
 public:
     /**
      *
      */
-    MSE_SUM();
+    ERROR_SUM();
 
     /**
      *
      */
-    ~MSE_SUM();
+    ~ERROR_SUM();
 
     /**
      *
diff --git a/src/Network/NeuralNetwork.cpp b/src/Network/NeuralNetwork.cpp
index 0cde711c3af3c0e4c862063062d4c30eb71336dd..c5331f20986521a791a5d9f75e484316d2a5aad6 100644
--- a/src/Network/NeuralNetwork.cpp
+++ b/src/Network/NeuralNetwork.cpp
@@ -401,7 +401,7 @@ void NeuralNetwork::determine_inputs_outputs() {
 }
 
 void NeuralNetwork::set_weight_array(std::vector<double> *weight_ptr) {
-    if(this->connection_weights){
+    if( this->connection_weights && this->delete_weights ){
         delete this->connection_weights;
     }
     this->connection_weights = weight_ptr;
diff --git a/src/Neuron/Neuron.h b/src/Neuron/Neuron.h
index 7379301f97e2aab290bf9c9f7d0b6d4d064c9bf6..ee8290c57a1075d7b10d549b98b7b3d80ab4ceab 100644
--- a/src/Neuron/Neuron.h
+++ b/src/Neuron/Neuron.h
@@ -259,6 +259,7 @@ class IDifferentiable {
      * @return
      */
     virtual Neuron* get_derivative( );
+
 }; /* end of IDifferentiable class */
 
  #endif /* NEURON_H_ */
\ No newline at end of file
diff --git a/src/Neuron/NeuronLogistic.cpp b/src/Neuron/NeuronLogistic.cpp
index 21d5bee4e9635a04b83e1a7c83d805570f698f00..ac873d1a4488444d353291b3d382745001a75542 100644
--- a/src/Neuron/NeuronLogistic.cpp
+++ b/src/Neuron/NeuronLogistic.cpp
@@ -5,6 +5,172 @@
 
 #include "NeuronLogistic.h"
 
+Neuron* NeuronLogistic_d2::get_copy( ){
+    NeuronLogistic_d2* output = new NeuronLogistic_d2( this->activation_function_parameters[0],  this->activation_function_parameters[1]);
+
+    return output;
+}
+
+NeuronLogistic_d2::NeuronLogistic_d2(double a, double b) {
+
+    this->n_activation_function_parameters = 2;
+    this->activation_function_parameters = new double[2];
+
+    this->activation_function_parameters[0] = a;
+    this->activation_function_parameters[1] = b;
+
+    this->edges_in = new std::vector<Connection*>(0);
+    this->edges_out = new std::vector<Connection*>(0);
+}
+
+void NeuronLogistic_d2::activate( ) {
+    //[(1 + a) e^(b-x) (1 + e^(b - x))^(-1) - 1]a e^(b - x) (1 + e^(b - x))^(-1 - a)
+
+    double a = this->activation_function_parameters[0];
+    double b = this->activation_function_parameters[1];
+    double x = this->potential;
+
+    double ex = std::pow(E, b - x);
+    double ex2 = std::pow(ex + 1.0, -a - 1.0);
+    double ex3 = 1.0 / (1.0 + ex);
+
+    this->state = -a * ex * ex2 * (1.0 - (a + 1.0) * ex * ex3);
+
+}
+
+double NeuronLogistic_d2::activation_function_eval_partial_derivative(int param_idx ) {
+
+    double a = this->activation_function_parameters[0];
+    double b = this->activation_function_parameters[1];
+    double x = this->potential;
+
+    if(param_idx == 0){
+        //partial derivative according to parameter 'a'
+        //(e^b (e^(b - x) + 1)^(-a) (a^2 (-e^b) log(e^(b - x) + 1) + a e^x log(e^(b - x) + 1) + 2 a e^b - e^x))/(e^b + e^x)^2
+        double eb = std::pow(E, b);
+        double ex = std::pow(E, x);
+        double ebx= eb / ex;
+        double ebxa = std::pow(ebx + 1.0, -a);
+        double lbx = std::log(ebx + 1.0);
+
+        return eb * ebxa * (a * lbx * ( a * (-eb) + ex ) + 2.0 * a * eb - ex) / ((eb + ex) * (eb + ex));
+    }
+    else if(param_idx == 1){
+        //partial derivative according to parameter 'b'
+        //-(a e^b (e^(b - x) + 1)^(-a) (a^2 e^(2 b) - 3 a e^(b + x) - e^(b + x) + e^(2 x)))/(e^b + e^x)^3
+
+        double eb = std::pow(E, b);
+        double ex = std::pow(E, x);
+        double ebx= eb / ex;
+        double ebxa = std::pow(ebx + 1.0, -a);
+
+        return  - a * eb * ebxa * (a * a * eb * eb - 3.0 * a * ebx - ebx + ex * ex) / ((eb + ex) * (eb + ex) * (eb + ex));
+    }
+
+    return 0.0;
+}
+
+double NeuronLogistic_d2::activation_function_eval_derivative() {
+    //(a e^b (1 + e^(b - x))^(-a) (a^2 e^(2 b) + e^(2 x) - e^(b + x) - 3 a e^(b + x)))/(e^b + e^x)^3
+    double a = this->activation_function_parameters[0];
+    double b = this->activation_function_parameters[1];
+    double x = this->potential;
+
+    double eb = std::pow(E, b);
+    double ex = std::pow(E, x);
+    double ebx= eb / ex;
+    double ebxa = std::pow(ebx + 1.0, -a);
+
+    return  a * eb * ebxa * (a * a * eb * eb - 3.0 * a * ebx - ebx + ex * ex) / ((eb + ex) * (eb + ex) * (eb + ex));
+}
+
+
+Neuron* NeuronLogistic_d1::get_copy( ){
+    NeuronLogistic_d1* output = new NeuronLogistic_d1( this->activation_function_parameters[0],  this->activation_function_parameters[1]);
+
+    return output;
+}
+
+NeuronLogistic_d1::NeuronLogistic_d1(double a, double b) {
+
+    this->n_activation_function_parameters = 2;
+    this->activation_function_parameters = new double[2];
+
+    this->activation_function_parameters[0] = a;
+    this->activation_function_parameters[1] = b;
+
+    this->edges_in = new std::vector<Connection*>(0);
+    this->edges_out = new std::vector<Connection*>(0);
+}
+
+
+void NeuronLogistic_d1::activate( ) {
+    //a*e^(b - x)*(1 + e^(b - x))^(-1 - a)
+
+    double a = this->activation_function_parameters[0];
+    double b = this->activation_function_parameters[1];
+    double x = this->potential;
+
+    double ex = std::pow(E, b - x);
+    this->state = a * ex * std::pow(1.0 + ex, -a - 1.0);
+
+}
+
+double NeuronLogistic_d1::activation_function_eval_partial_derivative(int param_idx ) {
+
+    double a = this->activation_function_parameters[0];
+    double b = this->activation_function_parameters[1];
+    double x = this->potential;
+
+    if(param_idx == 0){
+        //partial derivative according to parameter 'a'
+        //e^(b - x) (1 + e^(b - x))^(-1 - a)[1 - a log(1 + e^(b - x))]
+        double ex = std::pow(E, b - x);
+        double ex2= std::pow(1.0 + ex, -1.0 - a);
+
+        return ex * ex2 * (1.0 - a * std::log(1.0 + ex));
+    }
+    else if(param_idx == 1){
+        //partial derivative according to parameter 'b'
+        //[(-1 - a) e^(b-x) (1 + e^(b - x))^(-1) + 1] * a e^(b - x) (1 + e^(b - x))^(-1 - a)
+        double ex = std::pow(E, b - x);
+        double ex2 = std::pow(ex + 1.0, -a - 1.0);
+        double ex3 = 1.0 / (1.0 + ex);
+
+        return a * ex * ex2 * (1.0 - (a + 1.0) * ex * ex3);
+    }
+
+    return 0.0;
+}
+
+double NeuronLogistic_d1::activation_function_eval_derivative() {
+    //[(1 + a) e^(b-x) (1 + e^(b - x))^(-1) - 1]a e^(b - x) (1 + e^(b - x))^(-1 - a)
+
+    double a = this->activation_function_parameters[0];
+    double b = this->activation_function_parameters[1];
+    double x = this->potential;
+
+    double ex = std::pow(E, b - x);
+    double ex2 = std::pow(ex + 1.0, -a - 1.0);
+    double ex3 = 1.0 / (1.0 + ex);
+
+    return -a * ex * ex2 * (1.0 - (a + 1.0) * ex * ex3);
+}
+
+NeuronLogistic_d2* NeuronLogistic_d1::get_derivative() {
+
+    //[(1 + a) e^(b-x) (1 + e^(b - x))^(-1) - 1]a e^(b - x) (1 + e^(b - x))^(-1 - a)
+    NeuronLogistic_d2* output = nullptr;
+
+    double a = this->activation_function_parameters[0];
+    double b = this->activation_function_parameters[1];
+    output = new NeuronLogistic_d2(a, b);
+
+    return output;
+
+}
+
+
 Neuron* NeuronLogistic::get_copy( ){
     NeuronLogistic* output = new NeuronLogistic( this->activation_function_parameters[0],  this->activation_function_parameters[1]);
 
@@ -77,7 +243,8 @@ double NeuronLogistic::activation_function_eval_derivative( ) {
 
 }
 
-Neuron* NeuronLogistic::get_derivative() {
+NeuronLogistic_d1* NeuronLogistic::get_derivative() {
+
     NeuronLogistic_d1 *output = nullptr;
     double a = this->activation_function_parameters[0];
     double b = this->activation_function_parameters[1];
@@ -86,4 +253,5 @@ Neuron* NeuronLogistic::get_derivative() {
     output->set_potential( this->potential );
 
     return output;
+
 }
\ No newline at end of file
diff --git a/src/Neuron/NeuronLogistic.h b/src/Neuron/NeuronLogistic.h
index f8ddd10759ed190221b65a664b125881c3ad164d..c290870106471d27580ab28f94fe4e3f3cedc85c 100644
--- a/src/Neuron/NeuronLogistic.h
+++ b/src/Neuron/NeuronLogistic.h
@@ -12,9 +12,100 @@
 
 #include <cmath>
 #include "Neuron.h"
-#include "NeuronLogistic_d1.h"
 #include "../constants.h"
 
+class NeuronLogistic_d2:public Neuron, public IDifferentiable {
+    friend class boost::serialization::access;
+protected:
+    template<class Archive>
+    void serialize(Archive & ar, const unsigned int version){
+        //TODO separate implementation to NeuronLogistic_d1.cpp!
+        ar & boost::serialization::base_object<Neuron>(*this);
+    };
+public:
+
+    Neuron* get_copy( );
+
+    /**
+     * Constructs the object of the Logistic neuron with activation function
+     * f(x) = [(1 + a) e^(b-x) (1 + e^(b - x))^(-1) - 1]a e^(b - x) (1 + e^(b - x))^(-1 - a)
+     * @param[in] a First coefficient, stored in activation_function_parameters[0]
+     * @param[in] b Second coefficient, stored in activation_function_parameters[1]
+     */
+    explicit NeuronLogistic_d2(double a = 0.0, double b = 0.0);
+
+    /**
+     * Evaluates 'a*e^(b - x)*(1 + e^(b - x))^(-1 - a)' and stores the result into the 'state' property
+     */
+    void activate( ) override;
+
+    /**
+     * Calculates the partial derivative of the activation function
+     * f(x) = [(1 + a) e^(b-x) (1 + e^(b - x))^(-1) - 1]a e^(b - x) (1 + e^(b - x))^(-1 - a)
+     * @param[in] param_idx Index of the parameter to calculate derivative of
+     * @return Partial derivative of the activation function according to the
+     * 'param_idx'-th parameter.
+     */
+    double activation_function_eval_partial_derivative(int param_idx) override;
+
+    /**
+     * Calculates d/dx of  [(1 + a) e^(b-x) (1 + e^(b - x))^(-1) - 1]a e^(b - x) (1 + e^(b - x))^(-1 - a)
+     * @return (a e^b (1 + e^(b - x))^(-a) (a^2 e^(2 b) + e^(2 x) - e^(b + x) - 3 a e^(b + x)))/(e^b + e^x)^3
+     */
+    double activation_function_eval_derivative( ) override;
+
+};
+
+
+
+class NeuronLogistic_d1:public Neuron, public IDifferentiable {
+    friend class boost::serialization::access;
+protected:
+    template<class Archive>
+    void serialize(Archive & ar, const unsigned int version){
+        //TODO separate implementation to NeuronLogistic_d1.cpp!
+        ar & boost::serialization::base_object<Neuron>(*this);
+    };
+public:
+
+    Neuron* get_copy( );
+
+    /**
+     * Constructs the object of the Logistic neuron with activation function
+     * f(x) = a*e^(b - x)*(1 + e^(b - x))^(-1 - a)
+     * @param[in] a First coefficient, stored in activation_function_parameters[0]
+     * @param[in] b Second coefficient, stored in activation_function_parameters[1]
+     */
+    explicit NeuronLogistic_d1(double a = 0.0, double b = 0.0);
+
+    /**
+     * Evaluates 'a*e^(b - x)*(1 + e^(b - x))^(-1 - a)' and stores the result into the 'state' property
+     */
+    void activate( ) override;
+
+    /**
+     * Calculates the partial derivative of the activation function
+     * f(x) = a*e^(b - x)*(1 + e^(b - x))^(-1 - a)
+     * @param[in] param_idx Index of the parameter to calculate derivative of
+     * @return Partial derivative of the activation function according to the
+     * 'param_idx'-th parameter.
+     */
+    double activation_function_eval_partial_derivative(int param_idx) override;
+
+    /**
+     * Calculates d/dx of  [a*e^(b - x)*(1 + e^(b - x))^(-1 - a)]
+     * @return [[(1 + a) e^(b-x) (1 + e^(b - x))^(-1) - 1]a e^(b - x) (1 + e^(b - x))^(-1 - a)]
+     */
+    double activation_function_eval_derivative( ) override;
+
+    /**
+     * Returns a pointer to a Neuron with derivative as its activation function
+     * @return
+     */
+    NeuronLogistic_d2* get_derivative() override;
+};
+
+
 class NeuronLogistic:public Neuron, public IDifferentiable {
     friend class boost::serialization::access;
 
@@ -59,7 +150,7 @@ public:
      * Returns a pointer to a Neuron with derivative as its activation function
      * @return
      */
-    Neuron* get_derivative() override;
+    NeuronLogistic_d1* get_derivative() override;
 };
 
 
diff --git a/src/Neuron/NeuronLogistic_d1.cpp b/src/Neuron/NeuronLogistic_d1.cpp
deleted file mode 100644
index fd5acd3dc3121e19cbdb87b2f2c120a3c47df7dc..0000000000000000000000000000000000000000
--- a/src/Neuron/NeuronLogistic_d1.cpp
+++ /dev/null
@@ -1,91 +0,0 @@
-/**
- * DESCRIPTION OF THE FILE
- *
- * @author Michal KravÄŤenko
- * @date 22.7.18 -
- */
-
-#include "NeuronLogistic_d1.h"
-
-Neuron* NeuronLogistic_d1::get_copy( ){
-    NeuronLogistic_d1* output = new NeuronLogistic_d1( this->activation_function_parameters[0],  this->activation_function_parameters[1]);
-
-    return output;
-}
-
-NeuronLogistic_d1::NeuronLogistic_d1(double a, double b) {
-
-    this->n_activation_function_parameters = 2;
-    this->activation_function_parameters = new double[2];
-
-    this->activation_function_parameters[0] = a;
-    this->activation_function_parameters[1] = b;
-
-    this->edges_in = new std::vector<Connection*>(0);
-    this->edges_out = new std::vector<Connection*>(0);
-}
-
-
-void NeuronLogistic_d1::activate( ) {
-    //a*e^(b - x)*(1 + e^(b - x))^(-1 - a)
-
-    double a = this->activation_function_parameters[0];
-    double b = this->activation_function_parameters[1];
-    double x = this->potential;
-
-    double ex = std::pow(E, b - x);
-    this->state = a * ex * std::pow(1.0 + ex, -a - 1.0);
-
-}
-
-double NeuronLogistic_d1::activation_function_eval_partial_derivative(int param_idx ) {
-
-    double a = this->activation_function_parameters[0];
-    double b = this->activation_function_parameters[1];
-    double x = this->potential;
-
-    if(param_idx == 0){
-        //partial derivative according to parameter 'a'
-        //e^(b - x) (1 + e^(b - x))^(-1 - a)[1 - a log(1 + e^(b - x))]
-        double ex = std::pow(E, b - x);
-        double ex2= std::pow(1.0 + ex, -1.0 - a);
-
-        return ex * ex2 * (1.0 - a * std::log(1.0 + ex));
-    }
-    else if(param_idx == 1){
-        //partial derivative according to parameter 'b'
-        //[(-1 - a) e^(b-x) (1 + e^(b - x))^(-1) + 1] * a e^(b - x) (1 + e^(b - x))^(-1 - a)
-        double ex = std::pow(E, b - x);
-        double ex2 = std::pow(ex + 1.0, -a - 1.0);
-        double ex3 = 1.0 / (1.0 + ex);
-
-        return a * ex * ex2 * (1.0 - (a + 1.0) * ex * ex3);
-    }
-
-    return 0.0;
-}
-
-double NeuronLogistic_d1::activation_function_eval_derivative() {
-    //[(1 + a) e^(b-x) (1 + e^(b - x))^(-1) - 1]a e^(b - x) (1 + e^(b - x))^(-1 - a)
-
-    double a = this->activation_function_parameters[0];
-    double b = this->activation_function_parameters[1];
-    double x = this->potential;
-
-    double ex = std::pow(E, b - x);
-    double ex2 = std::pow(ex + 1.0, -a - 1.0);
-    double ex3 = 1.0 / (1.0 + ex);
-
-    return -a * ex * ex2 * (1.0 - (a + 1.0) * ex * ex3);
-}
-
- Neuron* NeuronLogistic_d1::get_derivative() {
-    //[(1 + a) e^(b-x) (1 + e^(b - x))^(-1) - 1]a e^(b - x) (1 + e^(b - x))^(-1 - a)
-    NeuronLogistic_d2* output = nullptr;
-
-     double a = this->activation_function_parameters[0];
-     double b = this->activation_function_parameters[1];
-     output = new NeuronLogistic_d2(a, b);
-
-    return output;
-}
\ No newline at end of file
diff --git a/src/Neuron/NeuronLogistic_d1.h b/src/Neuron/NeuronLogistic_d1.h
deleted file mode 100644
index 66e5ed8b9615cc92b3fec08aef8e295e85a02330..0000000000000000000000000000000000000000
--- a/src/Neuron/NeuronLogistic_d1.h
+++ /dev/null
@@ -1,64 +0,0 @@
-/**
- * DESCRIPTION OF THE FILE
- *
- * @author Michal KravÄŤenko
- * @date 22.7.18 -
- */
-
-#ifndef INC_4NEURO_NEURONLOGISTIC_D1_H
-#define INC_4NEURO_NEURONLOGISTIC_D1_H
-
-#include <cmath>
-#include "Neuron.h"
-#include "NeuronLogistic_d2.h"
-#include "../constants.h"
-
-class NeuronLogistic_d1:public Neuron, public IDifferentiable {
-    friend class boost::serialization::access;
-protected:
-    template<class Archive>
-    void serialize(Archive & ar, const unsigned int version){
-        //TODO separate implementation to NeuronLogistic_d1.cpp!
-        ar & boost::serialization::base_object<Neuron>(*this);
-    };
-public:
-
-    Neuron* get_copy( );
-
-    /**
-     * Constructs the object of the Logistic neuron with activation function
-     * f(x) = a*e^(b - x)*(1 + e^(b - x))^(-1 - a)
-     * @param[in] a First coefficient, stored in activation_function_parameters[0]
-     * @param[in] b Second coefficient, stored in activation_function_parameters[1]
-     */
-    explicit NeuronLogistic_d1(double a = 0.0, double b = 0.0);
-
-    /**
-     * Evaluates 'a*e^(b - x)*(1 + e^(b - x))^(-1 - a)' and stores the result into the 'state' property
-     */
-    void activate( ) override;
-
-    /**
-     * Calculates the partial derivative of the activation function
-     * f(x) = a*e^(b - x)*(1 + e^(b - x))^(-1 - a)
-     * @param[in] param_idx Index of the parameter to calculate derivative of
-     * @return Partial derivative of the activation function according to the
-     * 'param_idx'-th parameter.
-     */
-    double activation_function_eval_partial_derivative(int param_idx) override;
-
-    /**
-     * Calculates d/dx of  [a*e^(b - x)*(1 + e^(b - x))^(-1 - a)]
-     * @return [[(1 + a) e^(b-x) (1 + e^(b - x))^(-1) - 1]a e^(b - x) (1 + e^(b - x))^(-1 - a)]
-     */
-    double activation_function_eval_derivative( ) override;
-
-    /**
-     * Returns a pointer to a Neuron with derivative as its activation function
-     * @return
-     */
-    Neuron* get_derivative() override;
-};
-
-
-#endif //INC_4NEURO_NEURONLOGISTIC_D1_H
diff --git a/src/Neuron/NeuronLogistic_d2.cpp b/src/Neuron/NeuronLogistic_d2.cpp
deleted file mode 100644
index bfa30f086566cbcc7d2da234c7956abd8c8d687d..0000000000000000000000000000000000000000
--- a/src/Neuron/NeuronLogistic_d2.cpp
+++ /dev/null
@@ -1,87 +0,0 @@
-/**
- * DESCRIPTION OF THE FILE
- *
- * @author Michal KravÄŤenko
- * @date 22.7.18 -
- */
-
-#include "NeuronLogistic_d2.h"
-
-Neuron* NeuronLogistic_d2::get_copy( ){
-    NeuronLogistic_d2* output = new NeuronLogistic_d2( this->activation_function_parameters[0],  this->activation_function_parameters[1]);
-
-    return output;
-}
-
-NeuronLogistic_d2::NeuronLogistic_d2(double a, double b) {
-
-    this->n_activation_function_parameters = 2;
-    this->activation_function_parameters = new double[2];
-
-    this->activation_function_parameters[0] = a;
-    this->activation_function_parameters[1] = b;
-
-    this->edges_in = new std::vector<Connection*>(0);
-    this->edges_out = new std::vector<Connection*>(0);
-}
-
-void NeuronLogistic_d2::activate( ) {
-    //[(1 + a) e^(b-x) (1 + e^(b - x))^(-1) - 1]a e^(b - x) (1 + e^(b - x))^(-1 - a)
-
-    double a = this->activation_function_parameters[0];
-    double b = this->activation_function_parameters[1];
-    double x = this->potential;
-
-    double ex = std::pow(E, b - x);
-    double ex2 = std::pow(ex + 1.0, -a - 1.0);
-    double ex3 = 1.0 / (1.0 + ex);
-
-    this->state = -a * ex * ex2 * (1.0 - (a + 1.0) * ex * ex3);
-
-}
-
-double NeuronLogistic_d2::activation_function_eval_partial_derivative(int param_idx ) {
-
-    double a = this->activation_function_parameters[0];
-    double b = this->activation_function_parameters[1];
-    double x = this->potential;
-
-    if(param_idx == 0){
-        //partial derivative according to parameter 'a'
-        //(e^b (e^(b - x) + 1)^(-a) (a^2 (-e^b) log(e^(b - x) + 1) + a e^x log(e^(b - x) + 1) + 2 a e^b - e^x))/(e^b + e^x)^2
-        double eb = std::pow(E, b);
-        double ex = std::pow(E, x);
-        double ebx= eb / ex;
-        double ebxa = std::pow(ebx + 1.0, -a);
-        double lbx = std::log(ebx + 1.0);
-
-        return eb * ebxa * (a * lbx * ( a * (-eb) + ex ) + 2.0 * a * eb - ex) / ((eb + ex) * (eb + ex));
-    }
-    else if(param_idx == 1){
-        //partial derivative according to parameter 'b'
-        //-(a e^b (e^(b - x) + 1)^(-a) (a^2 e^(2 b) - 3 a e^(b + x) - e^(b + x) + e^(2 x)))/(e^b + e^x)^3
-
-        double eb = std::pow(E, b);
-        double ex = std::pow(E, x);
-        double ebx= eb / ex;
-        double ebxa = std::pow(ebx + 1.0, -a);
-
-        return  - a * eb * ebxa * (a * a * eb * eb - 3.0 * a * ebx - ebx + ex * ex) / ((eb + ex) * (eb + ex) * (eb + ex));
-    }
-
-    return 0.0;
-}
-
-double NeuronLogistic_d2::activation_function_eval_derivative() {
-    //(a e^b (1 + e^(b - x))^(-a) (a^2 e^(2 b) + e^(2 x) - e^(b + x) - 3 a e^(b + x)))/(e^b + e^x)^3
-    double a = this->activation_function_parameters[0];
-    double b = this->activation_function_parameters[1];
-    double x = this->potential;
-
-    double eb = std::pow(E, b);
-    double ex = std::pow(E, x);
-    double ebx= eb / ex;
-    double ebxa = std::pow(ebx + 1.0, -a);
-
-    return  a * eb * ebxa * (a * a * eb * eb - 3.0 * a * ebx - ebx + ex * ex) / ((eb + ex) * (eb + ex) * (eb + ex));
-}
diff --git a/src/Neuron/NeuronLogistic_d2.h b/src/Neuron/NeuronLogistic_d2.h
deleted file mode 100644
index d40bce0bda0425473f113b9fc4e2b1f419160be6..0000000000000000000000000000000000000000
--- a/src/Neuron/NeuronLogistic_d2.h
+++ /dev/null
@@ -1,59 +0,0 @@
-/**
- * DESCRIPTION OF THE FILE
- *
- * @author Michal KravÄŤenko
- * @date 22.7.18 -
- */
-
-#ifndef INC_4NEURO_NEURONLOGISTIC_D2_H
-#define INC_4NEURO_NEURONLOGISTIC_D2_H
-
-
-#include <cmath>
-#include "Neuron.h"
-#include "../constants.h"
-
-class NeuronLogistic_d2:public Neuron, public IDifferentiable {
-    friend class boost::serialization::access;
-protected:
-    template<class Archive>
-    void serialize(Archive & ar, const unsigned int version){
-        //TODO separate implementation to NeuronLogistic_d1.cpp!
-        ar & boost::serialization::base_object<Neuron>(*this);
-    };
-public:
-
-    Neuron* get_copy( );
-
-    /**
-     * Constructs the object of the Logistic neuron with activation function
-     * f(x) = [(1 + a) e^(b-x) (1 + e^(b - x))^(-1) - 1]a e^(b - x) (1 + e^(b - x))^(-1 - a)
-     * @param[in] a First coefficient, stored in activation_function_parameters[0]
-     * @param[in] b Second coefficient, stored in activation_function_parameters[1]
-     */
-    explicit NeuronLogistic_d2(double a = 0.0, double b = 0.0);
-
-    /**
-     * Evaluates 'a*e^(b - x)*(1 + e^(b - x))^(-1 - a)' and stores the result into the 'state' property
-     */
-    void activate( ) override;
-
-    /**
-     * Calculates the partial derivative of the activation function
-     * f(x) = [(1 + a) e^(b-x) (1 + e^(b - x))^(-1) - 1]a e^(b - x) (1 + e^(b - x))^(-1 - a)
-     * @param[in] param_idx Index of the parameter to calculate derivative of
-     * @return Partial derivative of the activation function according to the
-     * 'param_idx'-th parameter.
-     */
-    double activation_function_eval_partial_derivative(int param_idx) override;
-
-    /**
-     * Calculates d/dx of  [(1 + a) e^(b-x) (1 + e^(b - x))^(-1) - 1]a e^(b - x) (1 + e^(b - x))^(-1 - a)
-     * @return (a e^b (1 + e^(b - x))^(-a) (a^2 e^(2 b) + e^(2 x) - e^(b + x) - 3 a e^(b + x)))/(e^b + e^x)^3
-     */
-    double activation_function_eval_derivative( ) override;
-
-};
-
-
-#endif //INC_4NEURO_NEURONLOGISTIC_D2_H
diff --git a/src/Solvers/DESolver.cpp b/src/Solvers/DESolver.cpp
new file mode 100644
index 0000000000000000000000000000000000000000..cda2606bebf231e5b3304abf371451a8e7cac025
--- /dev/null
+++ b/src/Solvers/DESolver.cpp
@@ -0,0 +1,358 @@
+/**
+ * DESCRIPTION OF THE FILE
+ *
+ * @author Michal Kravčenko
+ * @date 22.7.18 -
+ */
+
+#include "DESolver.h"
+
+MultiIndex::MultiIndex(unsigned int dimension) {
+    this->dim = dimension;
+    this->partial_derivatives_degrees.resize( this->dim );
+    std::fill( this->partial_derivatives_degrees.begin(), this->partial_derivatives_degrees.end(), 0 );
+}
+
+void MultiIndex::set_partial_derivative(unsigned int index, unsigned int value) {
+    this->partial_derivatives_degrees[ index ] = value;
+}
+
+std::vector<unsigned int>* MultiIndex::get_partial_derivatives_degrees() {
+    return &this->partial_derivatives_degrees;
+}
+
+bool MultiIndex::operator<(const MultiIndex &rhs) const {
+    if(dim != rhs.dim){ return dim < rhs.dim; } /* compare dimensions first; std::map requires a strict weak ordering */
+
+    for(unsigned int i = 0; i < dim; ++i){
+        if(partial_derivatives_degrees[i] != rhs.partial_derivatives_degrees[i]){ /* lexicographic: decide on the first differing degree */
+            return partial_derivatives_degrees[i] < rhs.partial_derivatives_degrees[i];
+        }
+    }
+    return false; /* all degrees equal => equivalent keys */
+}
+
+std::string MultiIndex::to_string( )const {
+    std::string output;
+    char buff[ 255 ];
+    if( this->dim == 0 ){ return output; } /* guard: 'dim - 1' below would underflow for an empty multi-index */
+    for( unsigned int i = 0; i + 1 < this->dim; ++i){
+        sprintf(buff, "%u, ", this->partial_derivatives_degrees[i]); /* %u: degrees are unsigned int */
+        output.append( buff );
+    }
+    sprintf(buff, "%u", this->partial_derivatives_degrees[this->dim - 1]);
+    output.append( buff );
+
+    return output;
+}
+
+unsigned int MultiIndex::get_degree() const{
+    unsigned int output = 0;
+
+    for( auto i: this->partial_derivatives_degrees ){
+        output += i;
+    }
+
+    return output;
+}
+
+
+DESolver::DESolver( unsigned int n_equations, unsigned int n_inputs, unsigned int m, unsigned int n_outputs ) {
+
+    if( m <= 0 || n_inputs <= 0 || n_outputs <= 0 || n_equations <= 0 ){
+        throw std::invalid_argument("Parameters 'm', 'n_equations', 'n_inputs' and 'n_outputs' must be greater than zero!");
+    }
+    printf("Differential Equation Solver with %d equations\n--------------------------------------------------------------------------\n", n_equations);
+
+    printf("Constructing NN structure representing the solution [%d input neurons][%d inner neurons][%d output neurons]...", n_inputs, m, n_outputs);
+
+    this->dim_i = n_inputs;
+    this->dim_o = n_outputs;
+    this->dim_inn= m;
+    this->n_equations = n_equations;
+
+    this->solution = new NeuralNetwork();
+
+    this->solution_inner_neurons = new std::vector<NeuronLogistic*>(0);
+    this->solution_inner_neurons->reserve( m );
+
+    /* input neurons */
+    std::vector<size_t> input_set( this->dim_i );
+    for( unsigned int i = 0; i < this->dim_i; ++i ){
+        NeuronLinear *input_i = new NeuronLinear(0.0, 1.0);  //f(x) = x
+        this->solution->add_neuron( input_i );
+        input_set[i] = i;
+    }
+    this->solution->specify_input_neurons( input_set );
+
+    /* output neurons */
+    std::vector<size_t> output_set( this->dim_o );
+    for( unsigned int i = 0; i < this->dim_o; ++i ){
+        NeuronLinear *output_i = new NeuronLinear(0.0, 1.0);  //f(x) = x
+        this->solution->add_neuron( output_i );
+        output_set[i] = i + this->dim_i;
+    }
+    this->solution->specify_output_neurons( output_set );
+
+    /* inner neurons */
+    for(unsigned int i = 0; i < this->dim_inn; ++i){
+        NeuronLogistic *inner_i = new NeuronLogistic(1.0, 0.0); //f(x) = 1.0 / (1.0 + e^(-x))
+        this->solution_inner_neurons->push_back( inner_i );
+        this->solution->add_neuron( inner_i );
+    }
+
+    /* connections between input neurons and inner neurons */
+    unsigned int dim_shift = this->dim_i + this->dim_o;
+    for(unsigned int i = 0; i < this->dim_i; ++i){
+        for(unsigned int j = 0; j < this->dim_inn; ++j){
+            this->solution->add_connection_simple(i, dim_shift + j);
+        }
+    }
+
+    /* connections between inner neurons and output neurons */
+    for(unsigned int i = 0; i < this->dim_inn; ++i){
+        for(unsigned int j = 0; j < this->dim_o; ++j){
+            this->solution->add_connection_simple(dim_shift + i, this->dim_i + j);
+        }
+    }
+
+    MultiIndex initial_mi(this->dim_i);
+
+    this->map_multiindices2nn[initial_mi] = this->solution;
+
+    this->differential_equations = new std::vector<NeuralNetworkSum*>(this->n_equations);
+
+    for( unsigned int i = 0; i < this->n_equations; ++i ){
+        this->differential_equations->at( i ) = new NeuralNetworkSum();
+    }
+
+    this->errors_functions_types = new std::vector<ErrorFunctionType >(this->n_equations);
+    this->errors_functions_data_sets = new std::vector<DataSet*>(this->n_equations);
+
+    printf(" done\n");
+
+}
+
+DESolver::~DESolver() {
+
+    if( this->solution_inner_neurons ){
+        delete this->solution_inner_neurons;
+        this->solution_inner_neurons = nullptr;
+    }
+
+    if( this->differential_equations ){
+        delete this->differential_equations;
+        this->differential_equations = nullptr;
+    }
+
+    if( this->errors_functions_types ){
+        delete this->errors_functions_types;
+        this->errors_functions_types = nullptr;
+    }
+
+    if( this->errors_functions_data_sets ){
+        delete this->errors_functions_data_sets;
+        this->errors_functions_data_sets = nullptr;
+    }
+
+
+
+}
+
+void DESolver::add_to_differential_equation( unsigned int equation_idx, MultiIndex &alpha, double beta ) {
+
+    if( equation_idx >= this->n_equations ){
+        throw std::invalid_argument( "The provided equation index is too large!" );
+    }
+
+    unsigned int derivative_degree = alpha.get_degree( );
+
+    if( derivative_degree > 2 ){
+        throw std::invalid_argument("The supplied multi-index represents partial derivative of order higher than 2! (Valid degree is at most 2)\n");
+    }
+
+    /* retrieve indices of the variables according to which we perform the derivations ( applicable to any order, not just 2 or less )*/
+    std::vector<unsigned int> partial_derivative_indices;
+    partial_derivative_indices.reserve(derivative_degree);
+    for( unsigned int i = 0; i < alpha.get_partial_derivatives_degrees()->size( ); ++i ){
+        unsigned int degree = alpha.get_partial_derivatives_degrees()->at( i );
+
+        while( degree > 0 ){
+
+            partial_derivative_indices.push_back( i );
+            degree--;
+
+        }
+    }
+
+
+
+//    for( auto idx: map_multiindices2nn ){
+////        printf("%s\n", idx.first.to_string().c_str());
+//        printf("Is %s < %s? [%d]\n", idx.first.to_string().c_str(), alpha.to_string().c_str(), idx.first < alpha );
+//        printf("Is %s < %s? [%d]\n", alpha.to_string().c_str(), idx.first.to_string().c_str(), alpha < idx.first );
+//    }
+
+    NeuralNetwork *new_net = nullptr;
+    /* we check whether the new multi-index is already present */
+    if(map_multiindices2nn.find( alpha ) != map_multiindices2nn.end()){
+        new_net = map_multiindices2nn[ alpha ];
+        this->differential_equations->at( equation_idx )->add_network( new_net, beta );
+        printf("Adding an existing partial derivative (multi-index: %s) to equation %u with coefficient %f\n", alpha.to_string().c_str(), equation_idx, beta);
+        return;
+    }
+    printf("Adding a new partial derivative (multi-index: %s) to equation %u with coefficient %f\n", alpha.to_string().c_str(), equation_idx, beta);
+
+    /* we need to construct a new neural network */
+     new_net = new NeuralNetwork( );
+     new_net->set_weight_array( this->solution->get_weights_ptr() );
+
+    /* input neurons */
+    std::vector<size_t> input_set( this->dim_i );
+    for( unsigned int i = 0; i < this->dim_i; ++i ){
+        NeuronLinear *input_i = new NeuronLinear(0.0, 1.0);  //f(x) = x
+        new_net->add_neuron( input_i );
+        input_set[i] = i;
+    }
+    new_net->specify_input_neurons( input_set );
+
+    /* output neurons */
+    std::vector<size_t> output_set( this->dim_o );
+    for( unsigned int i = 0; i < this->dim_o; ++i ){
+        NeuronLinear *output_i = new NeuronLinear(0.0, 1.0);  //f(x) = x
+        new_net->add_neuron( output_i );
+        output_set[i] = i + this->dim_i;
+    }
+    new_net->specify_output_neurons( output_set );
+
+    /* the new partial derivative has degree of at least one */
+    for( unsigned int i = 0; i < this->dim_inn; ++i ){
+        if( derivative_degree == 1 ){
+            NeuronLogistic_d1 *new_inn_neuron = this->solution_inner_neurons->at( i )->get_derivative( );
+            new_net->add_neuron( new_inn_neuron );
+        }
+        else if( derivative_degree == 2 ){
+            NeuronLogistic_d1 *d1_tmp = this->solution_inner_neurons->at( i )->get_derivative( ); NeuronLogistic_d2 *new_inn_neuron = d1_tmp->get_derivative( ); delete d1_tmp; /* do not leak the intermediate first derivative */
+            new_net->add_neuron( new_inn_neuron );
+        }
+    }
+
+    /* identity neurons serving as a 'glue'*/
+    for(unsigned int i = 0; i < derivative_degree * this->dim_o * this->dim_inn; ++i){
+        NeuronLinear *new_neuron = new NeuronLinear(0.0, 1.0);  //f(x) = x
+        new_net->add_neuron( new_neuron );
+    }
+
+    /* connections between input neurons and inner neurons */
+    unsigned int dim_shift = this->dim_i + this->dim_o;
+    size_t weight_idx = 0;
+    for(unsigned int i = 0; i < this->dim_i; ++i){
+        for(unsigned int j = 0; j < this->dim_inn; ++j){
+//            printf("  adding a connection between input neuron %2d and inner neuron %2d, weight index %d\n", i, j, weight_idx);
+            new_net->add_connection_simple(i, dim_shift + j, weight_idx);
+            weight_idx++;
+        }
+    }
+
+    /* connections between inner neurons and the first set of 'glueing' neurons */
+//    unsigned int glue_shift = dim_shift + this->dim_inn;
+    unsigned int glue_shift = dim_shift + this->dim_inn;
+//    size_t weight_idx_mem = weight_idx;
+    for(unsigned int i = 0; i < this->dim_inn; ++i){
+        for(unsigned int j = 0; j < this->dim_o; ++j){
+//            printf("  adding a connection between inner neuron %2d and glue neuron %2d, weight index %d\n", i, i * this->dim_o + j, weight_idx);
+            new_net->add_connection_simple(dim_shift + i, glue_shift + i * this->dim_o + j, weight_idx );
+            weight_idx++;
+        }
+    }
+
+    if( derivative_degree == 1 ){
+        /* connections from the first set of glueing neurons to the output neurons */
+        unsigned int pi = partial_derivative_indices[0];
+
+        for(unsigned int i = 0; i < this->dim_inn; ++i){
+            unsigned int local_weight_idx = pi * this->dim_inn + i;
+
+            for(unsigned int j = 0; j < this->dim_o; ++j){
+//                printf("  adding a connection between glue neuron %2d and output neuron %2d, weight index %d\n", i * this->dim_o + j, j, local_weight_idx);
+                new_net->add_connection_simple(glue_shift + i * this->dim_o + j, j + this->dim_i, local_weight_idx );
+            }
+        }
+    }
+    else if( derivative_degree == 2 ){
+        /* connections from the first set of glueing neurons towards the second set of glueing neurons */
+        unsigned int pi = partial_derivative_indices[0];
+        unsigned int glue_shift_2 = this->dim_inn + this->dim_i + this->dim_o + this->dim_inn * this->dim_o;
+
+        for(unsigned int i = 0; i < this->dim_inn; ++i){
+            unsigned int local_weight_idx = pi * this->dim_inn + i;
+
+            for(unsigned int j = 0; j < this->dim_o; ++j){
+//                printf("  adding a connection between glue neuron[1] %2d and glue neuron[2] %2d, weight index %d\n", i * this->dim_o + j, i * this->dim_o + j, local_weight_idx);
+                new_net->add_connection_simple(glue_shift + i * this->dim_o + j, glue_shift_2 + i * this->dim_o + j, local_weight_idx );
+            }
+        }
+
+        /* connections from the second set of glueing neurons towards the output neurons */
+        pi = partial_derivative_indices[1];
+
+        for(unsigned int i = 0; i < this->dim_inn; ++i){
+            unsigned int local_weight_idx = pi * this->dim_inn + i;
+
+            for(unsigned int j = 0; j < this->dim_o; ++j){
+//                printf("  adding a connection between glue neuron[2] %2d and output neuron %2d, weight index %d\n", i * this->dim_o + j, j, local_weight_idx);
+                new_net->add_connection_simple(glue_shift_2 + i * this->dim_o + j, j + this->dim_i, local_weight_idx ); /* output neurons live at [dim_i, dim_i + dim_o), matching the degree-1 branch */
+            }
+        }
+    }
+    map_multiindices2nn[ alpha ] = new_net;
+
+    this->differential_equations->at( equation_idx )->add_network( new_net, beta );
+}
+
+void DESolver::set_error_function(unsigned int equation_idx, ErrorFunctionType F, DataSet *conditions) {
+    if( equation_idx >= this->n_equations ){
+        throw std::invalid_argument("The parameter 'equation_idx' is too large! It exceeds the number of differential equations.");
+    }
+
+    this->errors_functions_types->at( equation_idx ) = F;
+    this->errors_functions_data_sets->at( equation_idx ) = conditions;
+}
+
+void DESolver::solve_via_particle_swarm(double *domain_bounds, double c1, double c2, double w,
+                                          unsigned int n_particles, unsigned int max_iters, double gamma,
+                                          double epsilon, double delta) {
+
+    NeuralNetwork *nn;
+    DataSet *ds;
+
+    /* DEFINITION OF THE PARTIAL ERROR FUNCTIONS */
+    std::vector<ErrorFunction*> error_functions( this->n_equations );
+    for(unsigned int i = 0; i < this->n_equations; ++i ){
+        nn = this->differential_equations->at( i );
+        ds = this->errors_functions_data_sets->at( i );
+
+        if( this->errors_functions_types->at( i ) == ErrorFunctionType::ErrorFuncMSE ){
+            error_functions[i] = new MSE( nn, ds );
+        }
+        else{
+            //default
+            error_functions[i] = new MSE( nn, ds );
+        }
+    }
+
+    /* DEFINITION OF THE GLOBAL ERROR FUNCTION */
+    ERROR_SUM total_error;
+    for(unsigned int i = 0; i < this->n_equations; ++i ) {
+        total_error.add_error_function( error_functions[i], 1.0 );
+    }
+
+    ParticleSwarm swarm_01(&total_error, domain_bounds, c1, c2, w, n_particles, max_iters);
+
+    swarm_01.optimize(gamma, epsilon, delta);
+    for( auto ef: error_functions ){ delete ef; } /* MSE instances allocated above; ERROR_SUM's destructor does not delete them */
+}
+
+NeuralNetwork* DESolver::get_solution() {
+    return this->solution;
+}
\ No newline at end of file
diff --git a/src/Solvers/DESolver.h b/src/Solvers/DESolver.h
new file mode 100644
index 0000000000000000000000000000000000000000..5165c5c30d48bcf70ef5648349fe71c5c87727eb
--- /dev/null
+++ b/src/Solvers/DESolver.h
@@ -0,0 +1,168 @@
+/**
+ * File containing methods for quick & simple formulation of PDEs as a system of Neural Networks with shared weights
+ *
+ * @author Michal Kravčenko
+ * @date 22.7.18 -
+ */
+
+ //TODO incorporate uncertainties as coefficients in NeuralNetworkSum or ErrorSum
+
+#ifndef INC_4NEURO_PDESOLVER_H
+#define INC_4NEURO_PDESOLVER_H
+
+#include <map>
+#include "../DataSet/DataSet.h"
+#include "../Network/NeuralNetwork.h"
+#include "../Network/NeuralNetworkSum.h"
+#include "../ErrorFunction/ErrorFunctions.h"
+#include "../Neuron/NeuronLinear.h"
+#include "../Neuron/NeuronLogistic.h"
+#include "../LearningMethods/ParticleSwarm.h"
+
+/**
+ * class representing a multi-index of partial derivatives
+ */
+class MultiIndex{
+private:
+    /**
+     * total number of variables
+     */
+    unsigned int dim;
+
+    /**
+     * a vector containing degrees of partial derivatives with respect to each variable
+     */
+    std::vector<unsigned int> partial_derivatives_degrees;
+
+public:
+    /**
+     *
+     * @param dimension
+     */
+    MultiIndex(unsigned int dimension);
+
+
+    /**
+     *
+     * @param index
+     * @param value
+     */
+    void set_partial_derivative(unsigned int index, unsigned int value);
+
+    /**
+     *
+     * @return
+     */
+    std::vector<unsigned int>* get_partial_derivatives_degrees( ) ;
+
+
+    /**
+     *
+     * @param rhs
+     * @return
+     */
+    bool operator <(const MultiIndex& rhs) const;
+
+    /**
+     *
+     * @return
+     */
+    std::string to_string( ) const ;
+
+    /**
+     *
+     * @return
+     */
+    unsigned int get_degree( ) const ;
+};
+
+
+
+class DESolver {
+private:
+
+    /* Mapping between used multiindices of partial derivatives and already defined NNs (in order to not define
+     * the same NN multiple times )*/
+    std::map<MultiIndex, NeuralNetwork*> map_multiindices2nn;
+
+    /* A list of the differential equations */
+    std::vector<NeuralNetworkSum*> * differential_equations = nullptr;
+
+    /* Error functions for differential equations */
+    std::vector<ErrorFunctionType> * errors_functions_types = nullptr;
+    std::vector<DataSet*> * errors_functions_data_sets = nullptr;
+
+    /* NN as the unknown function */
+    NeuralNetwork * solution = nullptr;
+
+
+    /* auxiliary variables */
+    std::vector<NeuronLogistic*> *solution_inner_neurons = nullptr;
+    unsigned int dim_i = 0, dim_o = 0, dim_inn = 0, n_equations = 0;
+
+public:
+    /**
+     * The attempted solution will contain 'm' inner neurons between the input neurons and the output neuron
+     * @param n_equations
+     * @param n_inputs
+     * @param m
+     * @param n_outputs
+     */
+    DESolver( unsigned int n_equations, unsigned int n_inputs, unsigned int m, unsigned int n_outputs );
+
+    /**
+     * default destructor
+     */
+    ~DESolver( );
+
+    /**
+     * Adds a new summand multiplied by 'beta' into the 'equation_idx'-th differential equation
+     * @param equation_idx
+     * @param alpha
+     * @param beta
+     */
+    void add_to_differential_equation( unsigned int equation_idx, MultiIndex &alpha, double beta );
+
+    /**
+     * Sets the error function for the differential equation with the corresponding index
+     * @param equation_idx
+     * @param F
+     * @param conditions
+     */
+    void set_error_function(unsigned int equation_idx, ErrorFunctionType F, DataSet *conditions);
+
+
+     /**
+      * solves the PDE with its boundary conditions via the particle swarm algorithm
+      * @param domain_bounds
+      * @param c1
+      * @param c2
+      * @param w
+      * @param n_particles
+      * @param max_iters
+      * @param gamma
+      * @param epsilon
+      * @param delta
+      * @return
+      */
+    void solve_via_particle_swarm(
+            double * domain_bounds,
+            double c1,
+            double c2,
+            double w,
+            unsigned int n_particles,
+            unsigned int max_iters,
+            double gamma,
+            double epsilon,
+            double delta
+            );
+
+    /**
+     * returns the pointer to the object representing the solution y(x_1, x_2, ..., x_n)
+     * @return
+     */
+    NeuralNetwork* get_solution( );
+};
+
+
+#endif //INC_4NEURO_PDESOLVER_H
diff --git a/src/Solvers/PDESolver.cpp b/src/Solvers/PDESolver.cpp
deleted file mode 100644
index 86030da681bff15f7f979de020f5b03d94d075b1..0000000000000000000000000000000000000000
--- a/src/Solvers/PDESolver.cpp
+++ /dev/null
@@ -1,8 +0,0 @@
-/**
- * DESCRIPTION OF THE FILE
- *
- * @author Michal KravÄŤenko
- * @date 22.7.18 - 
- */
-
-#include "PDESolver.h"
diff --git a/src/Solvers/PDESolver.h b/src/Solvers/PDESolver.h
deleted file mode 100644
index 74c2968036d45048191b8c83f19fb72ddf527ddc..0000000000000000000000000000000000000000
--- a/src/Solvers/PDESolver.h
+++ /dev/null
@@ -1,81 +0,0 @@
-/**
- * File containing methods for quick & simple formulation of PDEs as a system of Neural Networks with shared weights
- *
- * @author Michal KravÄŤenko
- * @date 22.7.18 -
- */
-
-#ifndef INC_4NEURO_PDESOLVER_H
-#define INC_4NEURO_PDESOLVER_H
-
-
-#include "../DataSet/DataSet.h"
-#include "../Network/NeuralNetwork.h"
-
-/**
- * class representing a multi-index of partial derivatives
- */
-struct MultiIndex{
-    /**
-     * total number of variables
-     */
-    unsigned int dim;
-
-    /**
-     * a vector containing degrees of partial derivatives with respect to each variable
-     */
-    std::vector<unsigned int> partial_derivatives_degrees;
-};
-
-class PDESolver {
-
-public:
-    /**
-     * default constructor
-     */
-    PDESolver();
-
-    /**
-     * default destructor
-     */
-    ~PDESolver();
-
-    /**
-     * adds a new boundary condition in the form of
-     * [d^{k}/d^{alpha}{x_{alpha_1}x_{alpha_2}...x_{alpha_k}} y(x_1, x_2, ..., x_n) = f(x_1, x_2, ..., x_n)]
-     * to the system.
-     * @param alpha
-     * @param conditions
-     */
-    void add_boundary_condition(MultiIndex &alpha, DataSet &conditions);
-
-
-    /**
-     * adds a new partial derivative in the form of
-     * [d^{k}/d^{alpha}{x_{alpha_1}x_{alpha_2}...x_{alpha_k}} y(x_1, x_2, ..., x_n)] * beta
-     * to the PDE.
-     * @param alpha
-     * @param beta
-     */
-    void add_to_equation(MultiIndex &alpha, double beta);
-
-    /**
-     * sets the right hand side so PDE(x_1, x_2, ..., x_n) = rhs(x_1, x_2, ..., x_n)
-     * @param rhs
-     */
-    void set_quation_rhs(DataSet &rhs);
-
-    /**
-     * solves the PDE with its boundary conditions via the particle swarm algorithm
-     */
-    void solve_particle_swarm( );
-
-    /**
-     * returns the pointer to the object representing the solution y(x_1, x_2, ..., x_n)
-     * @return
-     */
-    NeuralNetwork* get_solution( );
-};
-
-
-#endif //INC_4NEURO_PDESOLVER_H
diff --git a/src/net_test_3.cpp b/src/net_test_3.cpp
index b245e035d4e4a52f7e0a3a3e4ad1e485f36d0aac..3f3f2cf666a5313874bd4dd83e4d20918a541955 100644
--- a/src/net_test_3.cpp
+++ b/src/net_test_3.cpp
@@ -99,7 +99,7 @@ int main() {
     MSE mse_01(subnet_01, &ds_01);
     MSE mse_02(subnet_02, &ds_02);
 
-    MSE_SUM mse_sum;
+    ERROR_SUM mse_sum;
     mse_sum.add_error_function( &mse_01 );
     mse_sum.add_error_function( &mse_02 );
 
diff --git a/src/net_test_ode_1.cpp b/src/net_test_ode_1.cpp
index 70e07407e430c8d0278b1790a2b2cf41c6f901d7..abafd1c8c221e7971da23374ec57e757f4b6ccf3 100644
--- a/src/net_test_ode_1.cpp
+++ b/src/net_test_ode_1.cpp
@@ -13,16 +13,42 @@
 #include <utility>
 
 #include "../include/4neuro.h"
+#include "Solvers/DESolver.h"
 
 int main() {
 
-    int n_inner_neurons = 4;
+    unsigned int n_inputs = 1;
+    unsigned int n_inner_neurons = 1;
+    unsigned int n_outputs = 1;
+    unsigned int n_equations = 3;
 
+    DESolver solver_01( n_equations, n_inputs, n_inner_neurons, n_outputs );
+
+    /* SETUP OF THE EQUATIONS */
+    MultiIndex alpha_0( n_inputs );
+    MultiIndex alpha_1( n_inputs );
+    MultiIndex alpha_2( n_inputs );
+    alpha_2.set_partial_derivative(0, 2);
+    alpha_1.set_partial_derivative(0, 1);
+
+    /* the governing differential equation */
+    solver_01.add_to_differential_equation( 0, alpha_2, 1.0 );
+    solver_01.add_to_differential_equation( 0, alpha_1, 4.0 );
+    solver_01.add_to_differential_equation( 0, alpha_0, 4.0 );
+
+    /* dirichlet boundary condition */
+    solver_01.add_to_differential_equation( 1, alpha_0, 1.0 );
+
+    /* neumann boundary condition */
+    solver_01.add_to_differential_equation( 2, alpha_1, 1.0 );
+
+
+    /* SETUP OF THE TRAINING DATA */
     std::vector<double> inp, out;
 
     double d1_s = 0.0, d1_e = 4.0, frac, alpha;
 
-    /* TRAIN DATA FOR g(t) */
+    /* TRAIN DATA FOR THE GOVERNING DE */
     std::vector<std::pair<std::vector<double>, std::vector<double>>> data_vec_g;
     unsigned int train_size = 100;
 
@@ -42,136 +68,37 @@ int main() {
 //        out = {0.0};
 //        data_vec_g.emplace_back(std::make_pair(inp, out));
 //    }
-    DataSet ds_g(&data_vec_g);
+    DataSet ds_00(&data_vec_g);
 
     /* TRAIN DATA FOR DIRICHLET BC */
     std::vector<std::pair<std::vector<double>, std::vector<double>>> data_vec_y;
     inp = {0.0};
     out = {1.0};
     data_vec_y.emplace_back(std::make_pair(inp, out));
-    DataSet ds_y(&data_vec_y);
+    DataSet ds_01(&data_vec_y);
 
     /* TRAIN DATA FOR NEUMANN BC */
     std::vector<std::pair<std::vector<double>, std::vector<double>>> data_vec_dy;
     inp = {0.0};
     out = {1.0};
     data_vec_dy.emplace_back(std::make_pair(inp, out));
-    DataSet ds_dy(&data_vec_dy);
-
-    /* DEFINITION OF NNs REPRESENTING y(t) and its derivatives */
-    NeuralNetwork y;
-    NeuralNetwork dy;
-    NeuralNetwork ddy;
-
-    /* Input neurons*/
-    NeuronLinear *i_y = new NeuronLinear(0.0, 1.0);  //f(x) = x
-    NeuronLinear *i_dy = new NeuronLinear(0.0, 1.0);  //f(x) = x
-    NeuronLinear *i_ddy = new NeuronLinear(0.0, 1.0);  //f(x) = x
-
-    /* Output neurons*/
-    NeuronLinear *o_y = new NeuronLinear(0.0, 1.0);  //f(x) = x
-    NeuronLinear *o_dy = new NeuronLinear(0.0, 1.0);  //f(x) = x
-    NeuronLinear *o_ddy = new NeuronLinear(0.0, 1.0);  //f(x) = x
-
-    int idxi = y.add_neuron(i_y);
-    int idxo = y.add_neuron(o_y);
-    int idxinn, idxinn2;
-
-    dy.add_neuron(i_dy);
-    dy.add_neuron(o_dy);
-    ddy.add_neuron(i_ddy);
-    ddy.add_neuron(o_ddy);
-
-    std::vector<double> *weight_ptr;
-
-    weight_ptr = y.get_weights_ptr( );
-
-    dy.set_weight_array( weight_ptr );
-    ddy.set_weight_array( weight_ptr );
-
-    size_t conn_1_idx, conn_2_idx;
-    for( int i = 0; i < n_inner_neurons; ++i ){
-        /* Inner neurons and connections of y(t) */
-        NeuronLogistic *inn_i = new NeuronLogistic(1.0, 0.0); //f(x) = 1.0 / (1.0 + e^(-x))
-        idxinn = y.add_neuron( inn_i );
-
-        conn_1_idx = y.add_connection_simple(idxi, idxinn);
-        conn_2_idx = y.add_connection_simple(idxinn, idxo);
-
-
-        //TODO INCORRECT neurons, must be derivatives of Logistic function
-        /* Inner neurons and connections of dy(t), SHARING WEIGHTS */
-        NeuronLogistic *inn_i_dy = new NeuronLogistic(1.0, 0.0); //f(x) = 1.0 / (1.0 + e^(-x))
-        idxinn = dy.add_neuron( inn_i_dy );
-        dy.add_connection_simple(idxi, idxinn, conn_1_idx);
-        NeuronLinear *identity_dy = new NeuronLinear(0.0, 1.0); //f(x) = x
-        idxinn2 = dy.add_neuron( identity_dy );
-        dy.add_connection_simple(idxinn, idxinn2, conn_2_idx);
-        dy.add_connection_shared_power1(idxinn2, idxo, conn_1_idx);
-
-        /* Inner neurons and connections of ddy(t), SHARING WEIGHTS */
-        NeuronLogistic *inn_i_ddy = new NeuronLogistic(1.0, 0.0); //f(x) = 1.0 / (1.0 + e^(-x))
-        idxinn = ddy.add_neuron( inn_i_ddy );
-        ddy.add_connection_simple(idxi, idxinn, conn_1_idx);
-        NeuronLinear *identity_ddy = new NeuronLinear(0.0, 1.0); //f(x) = x
-        idxinn2 = ddy.add_neuron( identity_ddy );
-        ddy.add_connection_simple(idxinn, idxinn2, conn_2_idx);
-        ddy.add_connection_shared_power2(idxinn2, idxo, conn_1_idx);
-    }
-
-    y.randomize_weights();
+    DataSet ds_02(&data_vec_dy);
 
-    /* SPECIFICATION OF INPUTS */
-    std::vector<size_t> net_input_neurons_indices(1);
-    std::vector<size_t> net_output_neurons_indices(1);
-    net_input_neurons_indices[0] = idxi;
-    net_output_neurons_indices[0] = idxo;
-    y.specify_input_neurons(net_input_neurons_indices);
-    y.specify_output_neurons(net_output_neurons_indices);
+    /* Placing the conditions into the solver */
+    solver_01.set_error_function( 0, ErrorFunctionType::ErrorFuncMSE, &ds_00 );
+    solver_01.set_error_function( 1, ErrorFunctionType::ErrorFuncMSE, &ds_01 );
+    solver_01.set_error_function( 2, ErrorFunctionType::ErrorFuncMSE, &ds_02 );
 
-    dy.specify_input_neurons(net_input_neurons_indices);
-    dy.specify_output_neurons(net_output_neurons_indices);
 
-    ddy.specify_input_neurons(net_input_neurons_indices);
-    ddy.specify_output_neurons(net_output_neurons_indices);
 
 
-//    y.print_weights();
-//    dy.print_weights();
-//    ddy.print_weights();
-
-//    y.print_stats();
-//    dy.print_stats();
-//    ddy.print_stats();
-
-    /* DEFINITION OF NN SUM */
-    NeuralNetworkSum g;
-    g.add_network(&y, 4.0);
-    g.add_network(&dy, 4.0);
-    g.add_network(&ddy, 1.0);
-
-    /* DEFINITION OF THE PARTIAL ERROR FUNCTIONS */
-    MSE error_y( &y, &ds_y );
-    MSE error_dy( &dy, &ds_dy );
-    MSE error_test_ddy(&ddy, &ds_g);
-    MSE error_g( &g, &ds_g );
-
-    /* DEFINITION OF THE GLOBAL ERROR FUNCTION */
-    MSE_SUM mse_sum;
-    mse_sum.add_error_function( &error_y );
-    mse_sum.add_error_function( &error_dy );
-    mse_sum.add_error_function( &error_g );
-
-//    double weights[2] = {1.0, 1.0};
-//    error_y.eval(weights);
-
-    /* TRAINING METHOD SETUP */
-    unsigned int max_iters = 500;
 
+    /* PARTICLE SWARM TRAINING METHOD SETUP */
+    unsigned int max_iters = 100;
 
     //must encapsulate each of the partial error functions
-    double *domain_bounds = new double[4 * n_inner_neurons];
-    for(int i = 0; i < 2 * n_inner_neurons; ++i){
+    double *domain_bounds = new double[ 2 * n_inner_neurons * (n_inputs + n_outputs) ];
+    for(unsigned int i = 0; i < n_inner_neurons * (n_inputs + n_outputs); ++i){
         domain_bounds[2 * i] = -800.0;
         domain_bounds[2 * i + 1] = 800.0;
     }
@@ -180,13 +107,12 @@ int main() {
 
     unsigned int n_particles = 10;
 
-    ParticleSwarm swarm_01(&mse_sum, domain_bounds, c1, c2, w, n_particles, max_iters);
-//    ParticleSwarm swarm_01(&error_y, domain_bounds, c1, c2, w, n_particles, max_iters);
-//    ParticleSwarm swarm_01(&error_dy, domain_bounds, c1, c2, w, n_particles, max_iters);
-//    ParticleSwarm swarm_01(&error_test_ddy, domain_bounds, c1, c2, w, n_particles, max_iters);
-//    ParticleSwarm swarm_01(&error_g, domain_bounds, c1, c2, w, n_particles, max_iters);
+    double gamma = 0.5, epsilon = 0.02, delta = 0.9;
+
+    solver_01.solve_via_particle_swarm( domain_bounds, c1, c2, w, n_particles, max_iters, gamma, epsilon, delta);
+
+    NeuralNetwork *solution = solver_01.get_solution();
 
-    swarm_01.optimize(0.5, 0.02, 0.9);
 
 
     delete [] domain_bounds;