diff --git a/src/Network/NeuralNetwork.cpp b/src/Network/NeuralNetwork.cpp
index 098f08ef59d3fbe814203795d194fb95169b2dec..e2adea6dccd99d928bd1a5eb7295603f690667e0 100644
--- a/src/Network/NeuralNetwork.cpp
+++ b/src/Network/NeuralNetwork.cpp
@@ -17,7 +17,7 @@
 
 namespace lib4neuro {
     NeuralNetwork::NeuralNetwork() {
-        this->neurons = new ::std::vector<Neuron *>(0);
+//        this->neurons = new ::std::vector<Neuron *>(0);
 //        this->neuron_biases = new ::std::vector<double>(0);
 //        this->neuron_bias_indices = new ::std::vector<int>(0);
 
@@ -56,14 +56,14 @@ namespace lib4neuro {
 
     NeuralNetwork::~NeuralNetwork() {
 
-        if (this->neurons) {
-            for (auto n: *(this->neurons)) {
-                delete n;
-                n = nullptr;
-            }
-            delete this->neurons;
-            this->neurons = nullptr;
-        }
+        /* the vector is now a value member, but it still owns the raw
+         * Neuron pointers stored in it; delete them here, otherwise
+         * every Neuron registered through add_neuron() is leaked */
+        for (auto n : this->neurons) {
+            delete n;
+        }
+        this->neurons.clear();
+
 
 //        if (this->neuron_potentials) {
 //            delete this->neuron_potentials;
@@ -171,7 +171,7 @@ namespace lib4neuro {
 // TODO rework due to the changed structure of the class
 //    Neuron * active_neuron, * target_neuron;
 //
-//    size_t n = this->neurons->size();
+//    size_t n = this->neurons.size();
 //    bool *part_of_subnetwork = new bool[n];
 //    ::std::fill(part_of_subnetwork, part_of_subnetwork + n, false);
 //
@@ -218,7 +218,7 @@ namespace lib4neuro {
 //
 //            is_reachable_from_source[ idx ] = true;
 //
-//            active_neuron = this->neurons->at( idx );
+//            active_neuron = this->neurons.at( idx );
 //
 //            for(Connection* connection: *(active_neuron->get_connections_out())){
 //
@@ -267,7 +267,7 @@ namespace lib4neuro {
 //
 //            is_reachable_from_destination[ idx ] = true;
 //
-//            active_neuron = this->neurons->at( idx );
+//            active_neuron = this->neurons.at( idx );
 //
 //            if(is_reachable_from_source[ idx ]){
 //                n_new_neurons++;
@@ -332,11 +332,11 @@ namespace lib4neuro {
 //                neuron_local_mapping[i] = (int)idx;
 //                idx++;
 //
-//                Neuron *new_neuron = this->neurons->at(i)->get_copy( );
+//                Neuron *new_neuron = this->neurons.at(i)->get_copy( );
 //
 //                output_net->add_neuron( new_neuron );
 //                local_local_n_arr.push_back( new_neuron );
-//                local_n_arr.push_back( this->neurons->at(i) );
+//                local_n_arr.push_back( this->neurons.at(i) );
 //            }
 //        }
 //        for(size_t in: input_neuron_indices){
@@ -401,13 +401,13 @@ namespace lib4neuro {
             this->neuron_bias_indices.push_back((int) bias_idx);
         }
 
-        this->outward_adjacency.push_back(new ::std::vector<std::pair<size_t, size_t>>(0));
-        this->inward_adjacency.push_back(new ::std::vector<std::pair<size_t, size_t>>(0));
+        this->outward_adjacency.push_back(std::make_shared<std::vector<std::pair<size_t, size_t>>>());
+        this->inward_adjacency.push_back(std::make_shared<std::vector<std::pair<size_t, size_t>>>());
 
-        this->neurons->push_back(n);
+        this->neurons.push_back(n);
 
         this->layers_analyzed = false;
-        return this->neurons->size() - 1;
+        return this->neurons.size() - 1;
     }
 
     void NeuralNetwork::eval_single_debug(::std::vector<double> &input, ::std::vector<double> &output,
@@ -454,7 +454,7 @@ namespace lib4neuro {
                 if (bias_idx >= 0) {
                     bias = this->neuron_biases.at(bias_idx);
                 }
-                potential = this->neurons->at(si)->activate(this->neuron_potentials.at(si), bias);
+                potential = this->neurons.at(si)->activate(this->neuron_potentials.at(si), bias);
                 std::cout << "  applying bias: " << bias << " to neuron potential: " << this->neuron_potentials.at(si) << " -> " << potential << std::endl;
 
                 for (auto c: *this->outward_adjacency.at(si)) {
@@ -476,7 +476,7 @@ namespace lib4neuro {
             if (bias_idx >= 0) {
                 bias = this->neuron_biases.at(bias_idx);
             }
-            output[i] = this->neurons->at(oi)->activate(this->neuron_potentials.at(oi), bias);
+            output[i] = this->neurons.at(oi)->activate(this->neuron_potentials.at(oi), bias);
             std::cout << "setting the output[" << i << "] = " << output[i] << "(bias = " << bias << ")" << std::endl;
             ++i;
         }
@@ -615,7 +615,7 @@ namespace lib4neuro {
                 if (bias_idx >= 0) {
                     bias = this->neuron_biases.at(bias_idx);
                 }
-                potential = this->neurons->at(si)->activate(this->neuron_potentials.at(si), bias);
+                potential = this->neurons.at(si)->activate(this->neuron_potentials.at(si), bias);
 
                 for (auto c: *this->outward_adjacency.at(si)) {
                     size_t ti = c.first;
@@ -634,7 +634,7 @@ namespace lib4neuro {
             if (bias_idx >= 0) {
                 bias = this->neuron_biases.at(bias_idx);
             }
-            output[i] = this->neurons->at(oi)->activate(this->neuron_potentials.at(oi), bias);
+            output[i] = this->neurons.at(oi)->activate(this->neuron_potentials.at(oi), bias);
             ++i;
         }
     }
@@ -653,8 +653,7 @@ namespace lib4neuro {
         NeuronDifferentiable *active_neuron;
 
         /* initial error propagation */
-        ::std::vector<size_t> *current_layer = this->neuron_layers_feedforward.at(
-                this->neuron_layers_feedforward.size() - 1);
+        std::shared_ptr<::std::vector<size_t>> current_layer = this->neuron_layers_feedforward.at(this->neuron_layers_feedforward.size() - 1);
         //TODO might not work in the future as the output neurons could be permuted
         for (size_t i = 0; i < current_layer->size(); ++i) {
             neuron_idx = current_layer->at(i);
@@ -669,7 +668,7 @@ namespace lib4neuro {
             for (size_t i = 0; i < current_layer->size(); ++i) {
 
                 neuron_idx = current_layer->at(i);
-                active_neuron = dynamic_cast<NeuronDifferentiable *> (this->neurons->at(neuron_idx));
+                active_neuron = dynamic_cast<NeuronDifferentiable *> (this->neurons.at(neuron_idx));
 
                 if (active_neuron) {
                     bias_idx = this->neuron_bias_indices.at(neuron_idx);
@@ -690,7 +689,7 @@ namespace lib4neuro {
                         size_t ti = c.first;
                         size_t ci = c.second;
 
-                        neuron_potential_t = this->neurons->at(ti)->get_last_activation_value( );
+                        neuron_potential_t = this->neurons.at(ti)->get_last_activation_value( );
                         connection_weight = this->connection_list.at(ci)->eval(this->connection_weights);
 
                         this->connection_list.at(ci)->eval_partial_derivative(*this->get_parameter_ptr_weights(),
@@ -722,7 +721,7 @@ namespace lib4neuro {
         NeuronDifferentiable *active_neuron;
 
         /* initial error propagation */
-        ::std::vector<size_t> *current_layer = this->neuron_layers_feedforward.at(
+        std::shared_ptr<::std::vector<size_t>> current_layer = this->neuron_layers_feedforward.at(
                 this->neuron_layers_feedforward.size() - 1);
         //TODO might not work in the future as the output neurons could be permuted
         std::cout << "Error scaling on the output layer: ";
@@ -742,7 +741,7 @@ namespace lib4neuro {
             for (size_t i = 0; i < current_layer->size(); ++i) {
 
                 neuron_idx = current_layer->at(i);
-                active_neuron = dynamic_cast<NeuronDifferentiable *> (this->neurons->at(neuron_idx));
+                active_neuron = dynamic_cast<NeuronDifferentiable *> (this->neurons.at(neuron_idx));
 
                 if (active_neuron) {
                     std::cout << "  [backpropagation] active neuron: " << neuron_idx << std::endl;
@@ -767,7 +766,7 @@ namespace lib4neuro {
                         size_t ti = c.first;
                         size_t ci = c.second;
 
-                        neuron_activation_t = this->neurons->at(ti)->get_last_activation_value( );
+                        neuron_activation_t = this->neurons.at(ti)->get_last_activation_value( );
                         connection_weight = this->connection_list.at(ci)->eval(this->connection_weights);
 
                         std::cout << "      [backpropagation] value ("<<ti<< "): " << neuron_activation_t << ", scaling: " << scaling_backprog[neuron_idx] << std::endl;
@@ -857,7 +856,7 @@ namespace lib4neuro {
     }
 
     size_t NeuralNetwork::get_n_neurons() {
-        return this->neurons->size();
+        return this->neurons.size();
     }
 
     void NeuralNetwork::specify_input_neurons(std::vector<size_t> &input_neurons_indices) {
@@ -960,7 +959,7 @@ namespace lib4neuro {
 
     void NeuralNetwork::write_stats() {
         ::std::cout << std::flush
-                    << "Number of neurons: " << this->neurons->size() << ::std::endl
+                    << "Number of neurons: " << this->neurons.size() << ::std::endl
                     << "Number of connections: " << this->connection_list.size() << ::std::endl
                     << "Number of active weights: " << this->connection_weights.size() << ::std::endl
                     << "Number of active biases: " << this->neuron_biases.size() << ::std::endl;
@@ -982,7 +981,7 @@ namespace lib4neuro {
             THROW_RUNTIME_ERROR("File " + file_path + " can not be opened!");
         }
 
-        ofs << "Number of neurons: " << this->neurons->size() << ::std::endl
+        ofs << "Number of neurons: " << this->neurons.size() << ::std::endl
             << "Number of connections: " << this->connection_list.size() << ::std::endl
             << "Number of active weights: " << this->connection_weights.size() << ::std::endl
             << "Number of active biases: " << this->neuron_biases.size() << ::std::endl;
@@ -999,7 +998,7 @@ namespace lib4neuro {
     }
 
     void NeuralNetwork::write_stats(std::ofstream* file_path) {
-        *file_path << "Number of neurons: " << this->neurons->size() << ::std::endl
+        *file_path << "Number of neurons: " << this->neurons.size() << ::std::endl
                    << "Number of connections: " << this->connection_list.size() << ::std::endl
                    << "Number of active weights: " << this->connection_weights.size() << ::std::endl
                    << "Number of active biases: " << this->neuron_biases.size() << ::std::endl;
@@ -1033,14 +1032,14 @@ namespace lib4neuro {
 
     void NeuralNetwork::add_inward_connection(size_t s, size_t t, size_t con_idx) {
         if (!this->inward_adjacency.at(s)) {
-            this->inward_adjacency.at(s) = new ::std::vector<std::pair<size_t, size_t>>(0);
+            this->inward_adjacency.at(s) = std::make_shared<std::vector<std::pair<size_t, size_t>>>();
         }
         this->inward_adjacency.at(s)->push_back(std::pair<size_t, size_t>(t, con_idx));
     }
 
     void NeuralNetwork::add_outward_connection(size_t s, size_t t, size_t con_idx) {
         if (!this->outward_adjacency.at(s)) {
-            this->outward_adjacency.at(s) = new ::std::vector<std::pair<size_t, size_t>>(0);
+            this->outward_adjacency.at(s) = std::make_shared<std::vector<std::pair<size_t, size_t>>>();
         }
         this->outward_adjacency.at(s)->push_back(std::pair<size_t, size_t>(t, con_idx));
     }
@@ -1079,7 +1078,7 @@ namespace lib4neuro {
 //    this->neuron_layers_feedbackward = new ::std::vector<std::vector<size_t>*>(0);
 
 
-        auto n = this->neurons->size();
+        auto n = this->neurons.size();
 
         /* helpful counters */
         ::std::vector<size_t> inward_saturation(n);
@@ -1116,7 +1115,7 @@ namespace lib4neuro {
         while (active_set_size[idx1] > 0) {
 
             /* we add the current active set as the new outward layer */
-            ::std::vector<size_t> *new_feedforward_layer = new ::std::vector<size_t>(active_set_size[idx1]);
+            std::shared_ptr<::std::vector<size_t>> new_feedforward_layer = std::make_shared<::std::vector<size_t>>(active_set_size[idx1]);
             this->neuron_layers_feedforward.push_back(new_feedforward_layer);
 
             //we iterate through the active neurons and propagate the signal
@@ -1237,13 +1236,13 @@ namespace lib4neuro {
                                          "doesn't specify input and output layers, which are compulsory!");
         }
 
-        this->neurons = new ::std::vector<Neuron *>(0);
+//        this->neurons = new ::std::vector<Neuron *>(0);
 //        this->neuron_biases = new ::std::vector<double>(0);
 //        this->neuron_potentials = new ::std::vector<double>(0);
 //        this->neuron_bias_indices = new ::std::vector<int>(0);
 
 //        this->connection_weights = new ::std::vector<double>(0);
-        this->connection_list = ::std::vector<std::shared_ptr<ConnectionFunctionGeneral>>(0);
+//        this->connection_list = ::std::vector<std::shared_ptr<ConnectionFunctionGeneral>>(0);
 //        this->inward_adjacency = new ::std::vector<std::vector<std::pair<size_t, size_t>> *>(0);
 //        this->outward_adjacency = new ::std::vector<std::vector<std::pair<size_t, size_t>> *>(0);
 //
diff --git a/src/Network/NeuralNetwork.h b/src/Network/NeuralNetwork.h
index d55e8dcac17b365c3614bd0709872fa3944f453f..f00819b8d1cf811d5a48975e3cb5dded02155a99 100644
--- a/src/Network/NeuralNetwork.h
+++ b/src/Network/NeuralNetwork.h
@@ -55,7 +55,7 @@ namespace lib4neuro {
         /**
          *
          */
-        std::vector<Neuron *> *neurons = nullptr;
+        std::vector<Neuron *> neurons;
 
         /**
          *
@@ -95,22 +95,22 @@ namespace lib4neuro {
         /**
          *
          */
-        std::vector<std::vector<std::pair<size_t, size_t>> *> inward_adjacency; // = nullptr;
+        std::vector<std::shared_ptr<std::vector<std::pair<size_t, size_t>>>> inward_adjacency; // = nullptr;
 
         /**
          *
          */
-        std::vector<std::vector<std::pair<size_t, size_t>> *> outward_adjacency; // = nullptr;
+        std::vector<std::shared_ptr<std::vector<std::pair<size_t, size_t>>>> outward_adjacency; // = nullptr;
 
         /**
          *
          */
-        std::vector<std::vector<size_t> *> neuron_layers_feedforward; // = nullptr;
+        std::vector<std::shared_ptr<std::vector<size_t>>> neuron_layers_feedforward; // = nullptr;
 
         /**
          *
          */
-        std::vector<std::vector<size_t> *> neuron_layers_feedbackward; // = nullptr;
+        std::vector<std::shared_ptr<std::vector<size_t>>> neuron_layers_feedbackward; // = nullptr;
 
         /**
         *