diff --git a/.gitignore b/.gitignore
index 73a81409b94ab3a425fdf0af593ab9b5a69c8d94..fdac68d8ba1e72f75193ea6cf3f940b1b9153458 100644
--- a/.gitignore
+++ b/.gitignore
@@ -16,3 +16,4 @@ CMakeFiles
 Makefile
 cmake_install.cmake
 lib4neuro.cbp
+_deps
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 09c74da09ea862f2a987dcfe509126dc6d4973e4..efaf61a9f831b5fb47aa35760a12824a41eae398 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -144,3 +144,4 @@ ubuntu_boost_local_static_deps:
 #        "registry.gitlab.com/gitlab-org/security-products/codequality:$SP_VERSION" /code
 #  artifacts:
 #    paths: [gl-code-quality-report.json]
+
diff --git a/CMakeLists.txt b/CMakeLists.txt
index 3d41c4f58421aab7ff9971c7177bf538a03e3afa..7f95b12127b782501f87756b21241affa3a34417 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -16,7 +16,7 @@ include(ProcessorCount)
 ProcessorCount(N_CORES)
 if(N_CORES GREATER 1)
     math(EXPR N_CORES "${N_CORES}-1")
-    set(CTEST_BUILD_FLAGS -j${N_CORES})
+    set(CTEST_BUILD_FLAGS -j ${N_CORES})
     set(ENV{N_CORES} ${N_CORES})
     set(ctest_test_args ${ctest_test_args} PARALLEL_LEVEL ${N_CORES})
 endif()
diff --git a/build.sh b/build.sh
index edf7c4ce423c03e27e28d1fb8a6ceffaa296e807..9cfd4e5e9bf851e27100d1af3f542901208f6bb6 100755
--- a/build.sh
+++ b/build.sh
@@ -2,6 +2,8 @@
 
 export CLEAN_AFTER=no
 
+rm -f CMakeCache.txt
+
 cd build_scripts/linux
 export DEPENDENCIES_LINK_TYPE=static
 ./linux_gcc_build_x64_debug_local.sh
diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt
index 02709f97f5d48ba478223d40ed75a6a161cb316f..a91df200719c58509e464fe416bea63845e16c2b 100644
--- a/src/CMakeLists.txt
+++ b/src/CMakeLists.txt
@@ -72,9 +72,22 @@ if ("${BUILD_LIB}" STREQUAL "yes")
 
     # GFortran linking
     set(GFORT "")
-    #if(NOT WIN32)
-    #    set(GFORT gfortran)
-    #endif()
+    if(OpenBLAS_FOUND)
+        message("Linking GFortran because of OpenBLAS...")
+        set(GFORT gfortran)
+    endif()
+
+    if(NOT OpenBLAS_LIBRARIES)
+        set(OpenBLAS_LIBRARIES "")
+    endif()
+
+    if(NOT BLAS_LIBRARIES)
+        set(BLAS_LIBRARIES "")
+    endif()
+
+    if(NOT LAPACK_LIBRARIES)
+        set(LAPACK_LIBRARIES "")
+    endif()
 
     target_link_libraries(
         lib4neuro
diff --git a/src/DataSet/DataSet.cpp b/src/DataSet/DataSet.cpp
index 820c1e61a230633edfe69cacefbb1a4888463671..822f1f400e8eff7c9a5edc5d6fbd96389c93a6af 100644
--- a/src/DataSet/DataSet.cpp
+++ b/src/DataSet/DataSet.cpp
@@ -17,13 +17,10 @@ namespace lib4neuro {
         this->n_elements = 0;
         this->input_dim = 0;
         this->output_dim = 0;
-        this->gen = boost::random::mt19937(std::time(0));
     }
 
     DataSet::DataSet(std::string file_path) {
         std::ifstream ifs(file_path);
-        this->gen = boost::random::mt19937(std::time(0));
-
         if(ifs.is_open()) {
             try {
                 boost::archive::text_iarchive ia(ifs);
@@ -45,7 +42,6 @@ namespace lib4neuro {
         this->data = *data_ptr;
         this->input_dim = this->data[0].first.size();
         this->output_dim = this->data[0].second.size();
-        this->gen = boost::random::mt19937(std::time(0));
 
         if(ns) {
             this->normalization_strategy = ns;
@@ -66,7 +62,6 @@ namespace lib4neuro {
         this->n_elements = 0;
         this->input_dim = 1;
         this->output_dim = 1;
-        this->gen = boost::random::mt19937(std::time(0));
 
         if(ns) {
             this->normalization_strategy = ns;
@@ -87,7 +82,6 @@ namespace lib4neuro {
         this->input_dim = bounds.size() / 2;
         this->output_dim = output_dim;
         this->n_elements = 0;
-        this->gen = boost::random::mt19937(std::time(0));
 
         if(ns) {
             this->normalization_strategy = ns;
@@ -152,7 +146,7 @@ namespace lib4neuro {
         }
 
         for (unsigned int i = 0; i < bounds.size(); i += 2) {
-             if (no_elems_in_one_dim == 1) {
+            if (no_elems_in_one_dim == 1) {
                 frac = 1;
             } else {
                 frac = (bounds[i] - bounds[i+1]) / (no_elems_in_one_dim - 1);
@@ -406,17 +400,19 @@ namespace lib4neuro {
      * Method returning random amount of data pairs between 1-max
      */
     std::vector<std::pair<std::vector<double>, std::vector<double>>> DataSet::get_random_data_batch(size_t max) {
-        if (max <= 0 || max >= this->data.size()) {
+        if (max <= 0) {
             return this->data;
         } else {
             std::vector<std::pair<std::vector<double>, std::vector<double>>> newData;
-            boost::random::uniform_int_distribution<> dist(0, this->data.size() - 1);
+            srand(time(NULL));  // TODO: use the Mersenne Twister from Boost
 
+            size_t n_chosen = rand() % std::min(max, this->data.size()) + 1;
+            n_chosen = max;  // overrides the random size above; every call attempts 'max' draws
             std::vector<size_t> chosens;
             size_t chosen;
 
-            for (int i = 0; i < max; i++) {
-                chosen = dist(gen);
+            for (size_t i = 0; i < n_chosen; i++) {
+                chosen = rand() % this->data.size();
                 auto it = std::find(chosens.begin(), chosens.end(), chosen);
 
                 if (it != chosens.end()) {
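The TODO above flags the srand()/rand() pair as temporary. Below is a minimal sketch of the intended follow-up, reusing the Boost Mersenne Twister that this patch removes from DataSet; the helper name is illustrative and not part of the patch, and it assumes data_size > 0:

    #include <boost/random/mersenne_twister.hpp>
    #include <boost/random/uniform_int_distribution.hpp>
    #include <ctime>

    // Illustrative helper: draw a uniformly distributed index from [0, data_size - 1]
    // with a function-local Mersenne Twister instead of srand()/rand().
    static size_t pick_random_index(size_t data_size) {
        static thread_local boost::random::mt19937 gen(static_cast<unsigned int>(std::time(nullptr)));
        boost::random::uniform_int_distribution<size_t> dist(0, data_size - 1);
        return dist(gen);
    }

Inside get_random_data_batch(), 'chosen = rand() % this->data.size();' would then become 'chosen = pick_random_index(this->data.size());'.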
diff --git a/src/DataSet/DataSet.h b/src/DataSet/DataSet.h
index fd20913a88cbd331a3d4ec37a39cf13d2cd182e1..8702313467dadff2acb4358ad9fc87ef0f02f736 100644
--- a/src/DataSet/DataSet.h
+++ b/src/DataSet/DataSet.h
@@ -12,9 +12,6 @@
 #include <string>
 #include <functional>
 #include <limits>
-#include <boost/random/mersenne_twister.hpp>
-#include <boost/random/uniform_int_distribution.hpp>
-#include <ctime>
 
 #include "../settings.h"
 #include "../NormalizationStrategy/NormalizationStrategy.h"
@@ -29,8 +26,6 @@ namespace lib4neuro {
 
     private:
 
-        boost::random::mt19937 gen;
-
         /**
          * Number of elements in the data set
          */
@@ -293,7 +288,7 @@ namespace lib4neuro {
          * @param max
          * @return
          */
-	    LIB4NEURO_API  std::vector<std::pair<std::vector<double>, std::vector<double>>> get_random_data_batch(size_t max);
+        LIB4NEURO_API  std::vector<std::pair<std::vector<double>, std::vector<double>>> get_random_data_batch(size_t max);
     };
 }
 #endif //INC_4NEURO_DATASET_H
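For context, a short usage sketch of the batching API declared above; the file path is hypothetical and the include path depends on the build setup:

    #include "DataSet/DataSet.h"   // adjust the include path to your build setup

    void batch_example() {
        lib4neuro::DataSet ds("data/my_dataset.txt");   // hypothetical path to a serialized data set
        auto batch = ds.get_random_data_batch(32);      // draws up to 32 input/output pairs
        // Passing 0 returns the whole data set, per the implementation in DataSet.cpp above.
    }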
diff --git a/src/Network/NeuralNetwork.cpp b/src/Network/NeuralNetwork.cpp
index 263c930eddee5466fabb8dabeae4c7ea5b91a320..262acd3a8624df894a72637940ebf10bdb93ec71 100644
--- a/src/Network/NeuralNetwork.cpp
+++ b/src/Network/NeuralNetwork.cpp
@@ -35,8 +35,6 @@ namespace lib4neuro {
         this->delete_weights = true;
         this->delete_biases = true;
         this->layers_analyzed = false;
-
-        this->gen = boost::random::mt19937(std::time(0));
     }
 
     NeuralNetwork::NeuralNetwork(std::string filepath) {
@@ -547,30 +545,6 @@ namespace lib4neuro {
         this->delete_weights = false;
     }
 
-    void NeuralNetwork::get_jacobian(std::vector<std::vector<double>> &jacobian, std::pair<std::vector<double>, std::vector<double>> &data, std::vector<double> &error) {
-
-        std::vector<double> fv(this->get_n_outputs());
-
-        jacobian.resize(this->get_n_outputs());
-        error.resize(this->get_n_outputs());
-        for(size_t i = 0; i < this->get_n_outputs(); ++i){
-            jacobian[i].resize(this->get_n_weights() + this->get_n_biases());
-            std::fill(jacobian[i].begin(), jacobian[i].end(), 0);
-        }
-
-        this->eval_single( data.first, fv );
-
-        std::vector<double> error_partial(this->get_n_outputs());
-        std::fill(error_partial.begin(), error_partial.end(), 0.0);
-
-        for( size_t i = 0; i < this->get_n_outputs(); ++i){
-            error_partial[i] = 1;
-            this->add_to_gradient_single(data.first, error_partial, 1.0, jacobian[i]);
-            error[i] = data.second[i] - fv[i];
-            error_partial[i] = 0;
-        }
-    }
-
     void NeuralNetwork::eval_single(::std::vector<double>& input,
                                     ::std::vector<double>& output,
                                     ::std::vector<double>* custom_weights_and_biases) {
@@ -789,7 +763,7 @@ namespace lib4neuro {
 
     void NeuralNetwork::randomize_weights() {
 
-
+        boost::random::mt19937 gen(std::time(0));
 
         // Init weight guess ("optimal" for logistic activation functions)
         double r = 4 * sqrt(6. / (this->connection_weights->size()));
@@ -803,7 +777,7 @@ namespace lib4neuro {
 
     void NeuralNetwork::randomize_biases() {
 
-
+        boost::random::mt19937 gen(std::time(0));
 
         // Init weight guess ("optimal" for logistic activation functions)
         boost::random::uniform_real_distribution<> dist(-1, 1);
@@ -1226,8 +1200,6 @@ namespace lib4neuro {
                                          "doesn't specify input and output layers, which are compulsory!");
         }
 
-        this->gen = boost::random::mt19937(std::time(0));
-
         this->neurons = new ::std::vector<Neuron *>(0);
         this->neuron_biases = new ::std::vector<double>(0);
         this->neuron_potentials = new ::std::vector<double>(0);
@@ -1256,8 +1228,8 @@ namespace lib4neuro {
         COUT_DEBUG("# of outputs: " << out_dim << std::endl);
 
         WRITE_TO_OFS_DEBUG(ofs, "Fully connected feed-forward network being constructed:" << std::endl
-                                << "# of inputs: " << inp_dim << std::endl
-                                << "# of outputs: " << out_dim << std::endl);
+                                << "# of inputs: " << inp_dim << std::endl
+                                << "# of outputs: " << out_dim << std::endl);
 
         std::vector<size_t> input_layer_neuron_indices;
         std::vector<size_t> previous_layer_neuron_indices;
@@ -1348,5 +1320,30 @@ namespace lib4neuro {
 
         this->analyze_layer_structure();
     }
+
+    void NeuralNetwork::get_jacobian(std::vector<std::vector<double>> &jacobian, std::pair<std::vector<double>, std::vector<double>> &data, std::vector<double> &error) {
+
+        std::vector<double> fv(this->get_n_outputs());
+
+        jacobian.resize(this->get_n_outputs());
+        error.resize(this->get_n_outputs());
+        for(size_t i = 0; i < this->get_n_outputs(); ++i){
+            jacobian[i].resize(this->get_n_weights() + this->get_n_biases());
+            std::fill(jacobian[i].begin(), jacobian[i].end(), 0);
+        }
+
+        this->eval_single( data.first, fv );
+
+        std::vector<double> error_partial(this->get_n_outputs());
+        std::fill(error_partial.begin(), error_partial.end(), 0.0);
+
+        for( size_t i = 0; i < this->get_n_outputs(); ++i){
+            error_partial[i] = 1;
+            this->add_to_gradient_single(data.first, error_partial, 1.0, jacobian[i]);
+            error[i] = data.second[i] - fv[i];
+            error_partial[i] = 0;
+        }
+    }
+
 }
 
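A brief usage sketch for the relocated get_jacobian(); the surrounding function is illustrative, while the call itself matches the declaration that moves in NeuralNetwork.h below:

    #include <utility>
    #include <vector>
    #include "Network/NeuralNetwork.h"   // adjust the include path to your build setup

    // Illustrative helper: evaluate one data sample and collect the per-output
    // Jacobian rows together with the output errors.
    void collect_jacobian(lib4neuro::NeuralNetwork &net,
                          std::pair<std::vector<double>, std::vector<double>> &sample) {
        std::vector<std::vector<double>> jacobian;  // resized to n_outputs x (n_weights + n_biases)
        std::vector<double> error;                  // resized to n_outputs, holds target - prediction
        net.get_jacobian(jacobian, sample, error);  // fills both output arguments
    }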
diff --git a/src/Network/NeuralNetwork.h b/src/Network/NeuralNetwork.h
index 0b06cd2b6be71028e9bd0a5f9790fa6bfdc73a14..8788a23cc640ad153f8c966db465b6e36a736a01 100644
--- a/src/Network/NeuralNetwork.h
+++ b/src/Network/NeuralNetwork.h
@@ -17,10 +17,6 @@
 #include <algorithm>
 #include <utility>
 #include <fstream>
-#include <boost/random/mersenne_twister.hpp>
-#include <boost/random/uniform_real_distribution.hpp>
-#include <ctime>
-
 
 #include "../settings.h"
 #include "../Neuron/Neuron.h"
@@ -32,8 +28,6 @@
 #include "../NetConnection/ConnectionFunctionIdentity.h"
 #include "../NormalizationStrategy/NormalizationStrategy.h"
 
-
-
 namespace lib4neuro {
 
     /**
@@ -55,9 +49,8 @@ namespace lib4neuro {
      *
      */
     class NeuralNetwork {
-
     protected:
-        boost::random::mt19937 gen;
+
         /**
          *
          */
@@ -169,6 +162,18 @@ namespace lib4neuro {
 
     public:
 
+        /**
+         * Runs @data through the network, computes the partial derivatives with respect to each output function and adds them
+         * to separate vectors in @jacobian. Also computes the output error and stores it in the vector @error
+         * @param[out] jacobian
+         * @param[in] data
+         * @param[out] error
+         */
+        LIB4NEURO_API virtual void
+        get_jacobian(std::vector<std::vector<double>> &jacobian, std::pair<std::vector<double>, std::vector<double>> &data, std::vector<double> &error);
+
+
+
         /**
         *
         * @param input
@@ -279,16 +284,6 @@ namespace lib4neuro {
         LIB4NEURO_API void
         add_existing_connection(size_t n1_idx, size_t n2_idx, size_t connection_idx, NeuralNetwork &parent_network);
 
-        /**
-         * Runs @data through the network and then computes partial derivatives with respect to each output function and adds them
-         * to seperate vectors in @jacobian. Also computes the out error and stores in the vector @error
-         * @param[out] jacobian
-         * @param[in] data
-         * @param[out] error
-         */
-        LIB4NEURO_API virtual void
-        get_jacobian(std::vector<std::vector<double>> &jacobian, std::pair<std::vector<double>, std::vector<double>> &data, std::vector<double> &error);
-
         /**
          *
          */
diff --git a/src/Neuron/Neuron.h b/src/Neuron/Neuron.h
index 6900c36f3d26331b825e7d36f2c56e9a3fc9e555..88747a63ef073600dae1bce06bf9f34b4f4c0c61 100644
--- a/src/Neuron/Neuron.h
+++ b/src/Neuron/Neuron.h
@@ -32,7 +32,7 @@ namespace lib4neuro {
 
     protected:
         /**
-         * Holds the last value of the activation function, used by this->activate
+         * Holds the last value of the activation function, used by this->activate
          */
         double activation_val;
 
@@ -64,24 +64,12 @@ namespace lib4neuro {
 
     }; /* end of Neuron class */
 
-    /**
-     * Abstract clas providing the method get_derivative()
-     */
-    class IGetDerivative {
-    public:
-        /**
-         * Returns a Neuron pointer object with activation function being the partial derivative of
-         * the activation function of this Neuron object with respect to the argument, i.e. 'potential'
-         * @return
-         */
-        virtual Neuron* get_derivative() = 0;
-    };
 
-    /**
-     * Class serving as an interface providing 'activation_function_eval_partial_derivative',
-     * 'activation_function_eval_derivative',  'get_partial_derivative' and
-     * 'get_derivative' methods.
-     */
+    /**
+     * Class serving as an interface providing 'activation_function_eval_partial_derivative',
+     * 'activation_function_eval_derivative', 'get_partial_derivative' and
+     * 'get_derivative' methods.
+     */
     class NeuronDifferentiable : public Neuron {
     public:
         /**
@@ -96,6 +84,13 @@ namespace lib4neuro {
          * and 'b' is the bias
          */
         virtual double activation_function_eval_derivative_bias(double x, double b) = 0;
+
+        /**
+         * Returns a Neuron pointer object with activation function being the partial derivative of
+         * the activation function of this Neuron object with respect to the argument, i.e. 'potential'
+         * @return
+         */
+        virtual Neuron *get_derivative() = 0;
     };
 
 }
diff --git a/src/Neuron/NeuronBinary.cpp b/src/Neuron/NeuronBinary.cpp
index 652559617ae3025ef3188c6bc89f934dc8d7f94a..9e7bbb80d616a0b56fbfbdecd021e28021ab4b36 100644
--- a/src/Neuron/NeuronBinary.cpp
+++ b/src/Neuron/NeuronBinary.cpp
@@ -22,4 +22,5 @@ namespace lib4neuro {
 
         return this->activation_val;
     }
+
 }
\ No newline at end of file
diff --git a/src/Neuron/NeuronBinary.h b/src/Neuron/NeuronBinary.h
index 717cbab2a9589ee453191a6340749ef7c0c18aae..1992b03fa3495787b9da990ebe54139d1b59c6f6 100644
--- a/src/Neuron/NeuronBinary.h
+++ b/src/Neuron/NeuronBinary.h
@@ -14,9 +14,9 @@
 
 namespace lib4neuro {
 
-    /**
-     *  Binary neuron class - uses unit-step as the activation function
-     */
+    /**
+     *  Binary neuron class - uses unit-step as the activation function
+     */
     class NeuronBinary : public Neuron {
 
     public:
diff --git a/src/Neuron/NeuronConstant.h b/src/Neuron/NeuronConstant.h
index b59f6bf38eab837c3d4dfa732a409155c2f8d941..71e43013630f8d5f6bf1caded440113468719505 100644
--- a/src/Neuron/NeuronConstant.h
+++ b/src/Neuron/NeuronConstant.h
@@ -12,7 +12,7 @@
 
 namespace lib4neuro {
 
-    class NeuronConstant : public NeuronDifferentiable, IGetDerivative {
+    class NeuronConstant : public NeuronDifferentiable {
     private:
         double p = 0.0;
 
diff --git a/src/Neuron/NeuronLinear.h b/src/Neuron/NeuronLinear.h
index 2db91345d746435dea2d0fb0dcba44994475f38a..d7043f932180790965b2a55b4ba8285ff949e469 100644
--- a/src/Neuron/NeuronLinear.h
+++ b/src/Neuron/NeuronLinear.h
@@ -18,7 +18,7 @@ namespace lib4neuro {
      * Linear neuron class - uses activation function in the form f(x)=a*x + b,
      * 'x' being the neuron's potential
      */
-    class NeuronLinear:public NeuronDifferentiable, IGetDerivative {
+    class NeuronLinear:public NeuronDifferentiable {
 
     public:
 
diff --git a/src/Neuron/NeuronLogistic.cpp b/src/Neuron/NeuronLogistic.cpp
index 9403784db8bb299dabd408efd9850b8ab6759c86..dd1469d794c4a4dbba8e3c847bed287a76f42674 100644
--- a/src/Neuron/NeuronLogistic.cpp
+++ b/src/Neuron/NeuronLogistic.cpp
@@ -43,10 +43,10 @@ namespace lib4neuro {
         return -this->activation_function_eval_derivative_bias(x, b);
     }
 
-//    NeuronLogistic *NeuronLogistic_d2::get_derivative() {
-//        //TODO maybe not the best way
-//        return nullptr;
-//    }
+    NeuronLogistic *NeuronLogistic_d2::get_derivative() {
+        //TODO maybe not the best way
+        return nullptr;
+    }
 
     NeuronLogistic_d1::NeuronLogistic_d1() {}
 
@@ -78,7 +78,7 @@ namespace lib4neuro {
         return -this->activation_function_eval_derivative_bias(x, b);
     }
 
-    NeuronDifferentiable* NeuronLogistic_d1::get_derivative() {
+    NeuronLogistic *NeuronLogistic_d1::get_derivative() {
         //(e^(b + x) (e^b - e^x))/(e^b + e^x)^3
         NeuronLogistic_d2 *output = nullptr;
 
@@ -113,7 +113,7 @@ namespace lib4neuro {
 
     }
 
-    NeuronDifferentiable* NeuronLogistic::get_derivative() {
+    NeuronLogistic *NeuronLogistic::get_derivative() {
 
         NeuronLogistic_d1 *output = nullptr;
         output = new NeuronLogistic_d1();
diff --git a/src/Neuron/NeuronLogistic.h b/src/Neuron/NeuronLogistic.h
index 45cd4f45b78a2ce7129d00498cc0c0a06f91bc69..9daf384bf05def06eec5038880371d23cf175d39 100644
--- a/src/Neuron/NeuronLogistic.h
+++ b/src/Neuron/NeuronLogistic.h
@@ -16,7 +16,7 @@
 #include "Neuron.h"
 
 namespace lib4neuro {
-    class NeuronLogistic : public NeuronDifferentiable, IGetDerivative {
+    class NeuronLogistic : public NeuronDifferentiable {
 
     public:
 
@@ -54,7 +54,7 @@ namespace lib4neuro {
          * Returns a pointer to a Neuron with derivative as its activation function
          * @return
          */
-        LIB4NEURO_API virtual NeuronDifferentiable* get_derivative() override;
+        LIB4NEURO_API virtual NeuronLogistic *get_derivative() override;
     };
 
 
@@ -78,7 +78,7 @@ namespace lib4neuro {
         /**
          * Evaluates 'e^(b - x)/(e^(b - x) + 1)^2' and returns the result
          */
-        LIB4NEURO_API double activate(double x, double b) override;
+        LIB4NEURO_API virtual double activate(double x, double b) override;
 
         /**
          * Calculates the partial derivative of the activation function
@@ -86,23 +86,23 @@ namespace lib4neuro {
          * @return Partial derivative of the activation function according to the
          * bias, returns: (e^(b + x) (e^x - e^b))/(e^b + e^x)^3
          */
-        LIB4NEURO_API double activation_function_eval_derivative_bias(double x, double b) override;
+        LIB4NEURO_API virtual double activation_function_eval_derivative_bias(double x, double b) override;
 
         /**
          * Calculates d/dx of  e^(b - x)*(1 + e^(b - x))^(-2)
          * @return  (e^(b + x) (e^b - e^x))/(e^b + e^x)^3
          */
-        LIB4NEURO_API double activation_function_eval_derivative(double x, double b) override;
+        LIB4NEURO_API virtual double activation_function_eval_derivative(double x, double b) override;
 
         /**
          * Returns a pointer to a Neuron with derivative as its activation function
          * @return
          */
-        LIB4NEURO_API NeuronDifferentiable* get_derivative() override;
+        LIB4NEURO_API virtual NeuronLogistic *get_derivative() override;
     };
 
 
-    class NeuronLogistic_d2 : public NeuronDifferentiable {
+    class NeuronLogistic_d2 : public NeuronLogistic_d1 {
 
     public:
 
@@ -121,7 +121,7 @@ namespace lib4neuro {
         /**
          * Evaluates '(e^(b + x) (e^b - e^x))/(e^b + e^x)^3' and returns the result
          */
-        LIB4NEURO_API double activate(double x, double b) override;
+        LIB4NEURO_API virtual double activate(double x, double b) override;
 
         /**
          * Calculates the partial derivative of the activation function
@@ -129,19 +129,19 @@ namespace lib4neuro {
          * @return Partial derivative of the activation function according to the
          * bias, returns: -(e^(b + x) (-4 e^(b + x) + e^(2 b) + e^(2 x)))/(e^b + e^x)^4
          */
-        LIB4NEURO_API double activation_function_eval_derivative_bias(double x, double b) override;
+        LIB4NEURO_API virtual double activation_function_eval_derivative_bias(double x, double b) override;
 
         /**
          * Calculates d/dx of  (e^(b + x) (e^b - e^x))/(e^b + e^x)^3
          * @return (e^(b + x) (-4 e^(b + x) + e^(2 b) + e^(2 x)))/(e^b + e^x)^4
          */
-        LIB4NEURO_API double activation_function_eval_derivative(double x, double b) override;
+        LIB4NEURO_API virtual double activation_function_eval_derivative(double x, double b) override;
 
         /**
          *
          * @return
          */
-//        LIB4NEURO_API virtual NeuronLogistic *get_derivative() override;
+        LIB4NEURO_API virtual NeuronLogistic *get_derivative() override;
 
     };
 
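Because get_derivative() now returns NeuronLogistic* throughout this hierarchy (NeuronLogistic yields a NeuronLogistic_d1, which yields a NeuronLogistic_d2, which returns nullptr), callers such as DESolver.cpp below can chain it without casts. A minimal sketch, assuming the Neuron hierarchy has a virtual destructor and keeping ownership handling deliberately simple:

    #include "Neuron/NeuronLogistic.h"   // adjust the include path to your build setup

    // Illustrative walk along the derivative chain enabled by the covariant return type.
    void derivative_chain_example(lib4neuro::NeuronLogistic *f) {
        lib4neuro::NeuronLogistic *df   = f->get_derivative();    // yields a NeuronLogistic_d1
        lib4neuro::NeuronLogistic *ddf  = df->get_derivative();   // yields a NeuronLogistic_d2
        lib4neuro::NeuronLogistic *last = ddf->get_derivative();  // nullptr, per the TODO in NeuronLogistic.cpp
        delete last;   // deleting nullptr is a no-op
        delete ddf;
        delete df;
    }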
diff --git a/src/Neuron/NeuronLogisticSerialization.h b/src/Neuron/NeuronLogisticSerialization.h
index 4bb0311fdefbba8bf73d17aee5a514c3811f4b50..1d65fcba360ff47d9fcc4baeec740c1746f430de 100644
--- a/src/Neuron/NeuronLogisticSerialization.h
+++ b/src/Neuron/NeuronLogisticSerialization.h
@@ -29,14 +29,14 @@ namespace lib4neuro {
     struct NeuronLogistic_d1::access {
         template<class Archive>
         static void serialize(Archive &ar, NeuronLogistic_d1 &n, const unsigned int version) {
-            ar & boost::serialization::base_object<Neuron>(n);
+            ar & boost::serialization::base_object<NeuronLogistic>(n);
         }
     };
 
     struct NeuronLogistic_d2::access {
         template<class Archive>
         static void serialize(Archive &ar, NeuronLogistic_d2 &n, const unsigned int version) {
-            ar & boost::serialization::base_object<Neuron>(n);
+            ar & boost::serialization::base_object<NeuronLogistic_d1>(n);
         }
     };
 
diff --git a/src/Solvers/DESolver.cpp b/src/Solvers/DESolver.cpp
index a963f35aeca5cd2f175a091b6cdf4cd6a361361c..e201c565c3ef2186a3ce87da5741e778c8c2e317 100644
--- a/src/Solvers/DESolver.cpp
+++ b/src/Solvers/DESolver.cpp
@@ -261,7 +261,7 @@ namespace lib4neuro {
             for (size_t j = 0; j < derivative_degree; ++j) {
                 n_ptr2 = n_ptr;
 
-                n_ptr = dynamic_cast<NeuronLogistic*>(n_ptr->get_derivative());
+                n_ptr = n_ptr->get_derivative();
 
                 if (j > 0) {
                     delete n_ptr2;