From e369eac255d44a1b3004fdd651a618ef2d6597fe Mon Sep 17 00:00:00 2001
From: Martin Beseda <martin.beseda@vsb.cz>
Date: Wed, 20 Mar 2019 17:06:14 +0100
Subject: [PATCH] WIP: Fix connection_weights memory leak in NeuralNetwork

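Store connection_weights by value (std::vector<double>) instead of
through a heap-allocated pointer, so the vector is freed together with
the NeuralNetwork object and the manual new/delete pair is no longer
needed. Call sites switch from '->' to '.' access,
get_parameter_ptr_weights() now returns &this->connection_weights, and
ConnectionFunctionGeneral::eval() receives the vector member directly
instead of a dereferenced pointer. The old allocation and deallocation
code is kept commented out while the change is still a work in progress.

A minimal before/after sketch of the ownership change (the classes
below are illustrative stand-ins, not the real lib4neuro API; only the
ownership pattern matches this patch):

    #include <iostream>
    #include <vector>

    // Before: the weight vector is owned through a raw pointer; a missing
    // (or skipped) delete in the destructor leaks it.
    class NetworkOld {
    public:
        NetworkOld() : weights(new std::vector<double>(0)) {}
        ~NetworkOld() { delete weights; }  // easy to forget or skip
        std::vector<double>* weights;
    };

    // After: the weight vector is held by value; std::vector's destructor
    // releases the storage automatically, so no manual delete is needed.
    class NetworkNew {
    public:
        std::vector<double> weights;
        // Mirrors get_parameter_ptr_weights(): callers that still expect a
        // pointer receive the address of the member.
        std::vector<double>* weights_ptr() { return &weights; }
    };

    int main() {
        NetworkNew net;
        net.weights.resize(3, 0.5);               // '.' access instead of '->'
        std::cout << net.weights.size() << '\n';  // prints 3
        return 0;
    }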
---
 src/Network/NeuralNetwork.cpp | 87 ++++++++++++++++++-----------------
 src/Network/NeuralNetwork.h   |  2 +-
 2 files changed, 45 insertions(+), 44 deletions(-)

diff --git a/src/Network/NeuralNetwork.cpp b/src/Network/NeuralNetwork.cpp
index 1979eaef..b2e00d76 100644
--- a/src/Network/NeuralNetwork.cpp
+++ b/src/Network/NeuralNetwork.cpp
@@ -21,8 +21,8 @@ namespace lib4neuro {
         this->neuron_biases = new ::std::vector<double>(0);
         this->neuron_bias_indices = new ::std::vector<int>(0);
 
-        this->connection_weights = new ::std::vector<double>(0);
-        this->connection_list = ::std::vector<std::shared_ptr<ConnectionFunctionGeneral>>(0);
+//        this->connection_weights = new ::std::vector<double>(0);
+//        this->connection_list = ::std::vector<std::shared_ptr<ConnectionFunctionGeneral>>(0);
         this->inward_adjacency = new ::std::vector<std::vector<std::pair<size_t, size_t>> *>(0);
         this->outward_adjacency = new ::std::vector<std::vector<std::pair<size_t, size_t>> *>(0);
 
@@ -85,10 +85,10 @@ namespace lib4neuro {
             this->input_neuron_indices = nullptr;
         }
 
-        if (this->connection_weights && this->delete_weights) {
-            delete this->connection_weights;
-            this->connection_weights = nullptr;
-        }
+//        if (this->connection_weights && this->delete_weights) {
+//            delete this->connection_weights;
+//            this->connection_weights = nullptr;
+//        }
 
         if (this->neuron_biases && this->delete_biases) {
             delete this->neuron_biases;
@@ -462,9 +462,9 @@ namespace lib4neuro {
                     size_t ci = c.second;
 
                     this->neuron_potentials->at(ti) +=
-                            this->connection_list.at(ci)->eval(*this->connection_weights) * potential;
+                            this->connection_list.at(ci)->eval(this->connection_weights) * potential;
 
-                    std::cout << "  adding input to neuron " << ti << " += " << this->connection_list.at(ci)->eval(*this->connection_weights) << "*" << potential << std::endl;
+                    std::cout << "  adding input to neuron " << ti << " += " << this->connection_list.at(ci)->eval(this->connection_weights) << "*" << potential << std::endl;
                 }
             }
         }
@@ -494,10 +494,10 @@ namespace lib4neuro {
 //            con_weight_u1u2 = new ConnectionFunctionIdentity();
         } else {
             if (sct == SIMPLE_CONNECTION_TYPE::NEXT_WEIGHT) {
-                weight_idx = this->connection_weights->size();
-                this->connection_weights->resize(weight_idx + 1);
+                weight_idx = this->connection_weights.size();
+                this->connection_weights.resize(weight_idx + 1);
             } else if (sct == SIMPLE_CONNECTION_TYPE::EXISTING_WEIGHT) {
-                if (weight_idx >= this->connection_weights->size()) {
+                if (weight_idx >= this->connection_weights.size()) {
                     ::std::cerr << "The supplied connection weight index is too large!\n" << ::std::endl;
                 }
             }
@@ -544,20 +544,21 @@ namespace lib4neuro {
 
     void NeuralNetwork::copy_parameter_space(std::vector<double> *parameters) {
         if (parameters != nullptr) {
-            for (unsigned int i = 0; i < this->connection_weights->size(); ++i) {
-                (*this->connection_weights).at(i) = (*parameters).at(i);
+            for (unsigned int i = 0; i < this->connection_weights.size(); ++i) {
+                this->connection_weights.at(i) = (*parameters).at(i);
             }
 
             for (unsigned int i = 0; i < this->neuron_biases->size(); ++i) {
-                (*this->neuron_biases).at(i) = (*parameters).at(i + this->connection_weights->size());
+                (*this->neuron_biases).at(i) = (*parameters).at(i + this->connection_weights.size());
             }
         }
     }
 
     void NeuralNetwork::set_parameter_space_pointers(NeuralNetwork &parent_network) {
 
-        if (this->connection_weights) {
-            delete connection_weights;
+        if (!this->connection_weights.empty()) {
+//            delete connection_weights;
+            this->connection_weights.clear();
         }
 
         if (this->neuron_biases) {
@@ -620,7 +621,7 @@ namespace lib4neuro {
                     size_t ci = c.second;
 
                     this->neuron_potentials->at(ti) +=
-                            this->connection_list.at(ci)->eval(*this->connection_weights) * potential;
+                            this->connection_list.at(ci)->eval(this->connection_weights) * potential;
                 }
             }
         }
@@ -689,7 +690,7 @@ namespace lib4neuro {
                         size_t ci = c.second;
 
                         neuron_potential_t = this->neurons->at(ti)->get_last_activation_value( );
-                        connection_weight = this->connection_list.at(ci)->eval(*this->connection_weights);
+                        connection_weight = this->connection_list.at(ci)->eval(this->connection_weights);
 
                         this->connection_list.at(ci)->eval_partial_derivative(*this->get_parameter_ptr_weights(),
                                                                                gradient,
@@ -766,7 +767,7 @@ namespace lib4neuro {
                         size_t ci = c.second;
 
                         neuron_activation_t = this->neurons->at(ti)->get_last_activation_value( );
-                        connection_weight = this->connection_list.at(ci)->eval(*this->connection_weights);
+                        connection_weight = this->connection_list.at(ci)->eval(this->connection_weights);
 
                         std::cout << "      [backpropagation] value ("<<ti<< "): " << neuron_activation_t << ", scaling: " << scaling_backprog[neuron_idx] << std::endl;
 
@@ -792,12 +793,12 @@ namespace lib4neuro {
         boost::random::mt19937 gen(std::time(0));
 
         // Init weight guess ("optimal" for logistic activation functions)
-        double r = 4 * sqrt(6. / (this->connection_weights->size()));
+        double r = 4 * sqrt(6. / (this->connection_weights.size()));
 
         boost::random::uniform_real_distribution<> dist(-r, r);
 
-        for (size_t i = 0; i < this->connection_weights->size(); i++) {
-            this->connection_weights->at(i) = dist(gen);
+        for (size_t i = 0; i < this->connection_weights.size(); i++) {
+            this->connection_weights.at(i) = dist(gen);
         }
     }
 
@@ -825,7 +826,7 @@ namespace lib4neuro {
 
     void NeuralNetwork::scale_weights(double alpha) {
         for(size_t i = 0; i < this->get_n_weights(); ++i){
-            this->connection_weights->at( i ) *= alpha;
+            this->connection_weights.at( i ) *= alpha;
         }
     }
 
@@ -843,7 +844,7 @@ namespace lib4neuro {
     }
 
     size_t NeuralNetwork::get_n_weights() {
-        return this->connection_weights->size();
+        return this->connection_weights.size();
     }
 
     size_t NeuralNetwork::get_n_biases() {
@@ -878,11 +879,11 @@ namespace lib4neuro {
 
     void NeuralNetwork::write_weights() {
         std::cout << "Connection weights: ";
-        if (this->connection_weights) {
-            for (size_t i = 0; i < this->connection_weights->size() - 1; ++i) {
-                std::cout << this->connection_weights->at(i) << ", ";
+        if (!this->connection_weights.empty()) {
+            for (size_t i = 0; i < this->connection_weights.size() - 1; ++i) {
+                std::cout << this->connection_weights.at(i) << ", ";
             }
-            std::cout << this->connection_weights->at(this->connection_weights->size() - 1) << std::endl;
+            std::cout << this->connection_weights.at(this->connection_weights.size() - 1) << std::endl;
         }
     }
 
@@ -895,22 +896,22 @@ namespace lib4neuro {
 
         ofs << "Connection weights: ";
 
-        if (this->connection_weights) {
-            for (size_t i = 0; i < this->connection_weights->size() - 1; ++i) {
-                ofs << this->connection_weights->at(i) << ", ";
+        if (!this->connection_weights.empty()) {
+            for (size_t i = 0; i < this->connection_weights.size() - 1; ++i) {
+                ofs << this->connection_weights.at(i) << ", ";
             }
-            ofs << this->connection_weights->at(this->connection_weights->size() - 1) << std::endl;
+            ofs << this->connection_weights.at(this->connection_weights.size() - 1) << std::endl;
         }
     }
 
     void NeuralNetwork::write_weights(std::ofstream* file_path) {
         *file_path << "Connection weights: ";
 
-        if (this->connection_weights) {
-            for (size_t i = 0; i < this->connection_weights->size() - 1; ++i) {
-                *file_path << this->connection_weights->at(i) << ", ";
+        if (!this->connection_weights.empty()) {
+            for (size_t i = 0; i < this->connection_weights.size() - 1; ++i) {
+                *file_path << this->connection_weights.at(i) << ", ";
             }
-            *file_path << this->connection_weights->at(this->connection_weights->size() - 1) << std::endl;
+            *file_path << this->connection_weights.at(this->connection_weights.size() - 1) << std::endl;
         }
     }
 
@@ -957,7 +958,7 @@ namespace lib4neuro {
         ::std::cout << std::flush
                     << "Number of neurons: " << this->neurons->size() << ::std::endl
                     << "Number of connections: " << this->connection_list.size() << ::std::endl
-                    << "Number of active weights: " << this->connection_weights->size() << ::std::endl
+                    << "Number of active weights: " << this->connection_weights.size() << ::std::endl
                     << "Number of active biases: " << this->neuron_biases->size() << ::std::endl;
 
         if(this->normalization_strategy) {
@@ -979,7 +980,7 @@ namespace lib4neuro {
 
         ofs << "Number of neurons: " << this->neurons->size() << ::std::endl
             << "Number of connections: " << this->connection_list.size() << ::std::endl
-            << "Number of active weights: " << this->connection_weights->size() << ::std::endl
+            << "Number of active weights: " << this->connection_weights.size() << ::std::endl
             << "Number of active biases: " << this->neuron_biases->size() << ::std::endl;
 
         if(this->normalization_strategy) {
@@ -996,7 +997,7 @@ namespace lib4neuro {
     void NeuralNetwork::write_stats(std::ofstream* file_path) {
         *file_path << "Number of neurons: " << this->neurons->size() << ::std::endl
                    << "Number of connections: " << this->connection_list.size() << ::std::endl
-                   << "Number of active weights: " << this->connection_weights->size() << ::std::endl
+                   << "Number of active weights: " << this->connection_weights.size() << ::std::endl
                    << "Number of active biases: " << this->neuron_biases->size() << ::std::endl;
 
         if(this->normalization_strategy) {
@@ -1008,12 +1009,12 @@ namespace lib4neuro {
         }
     }
 
-    std::vector<double> *NeuralNetwork::get_parameter_ptr_biases() {
+    std::vector<double>* NeuralNetwork::get_parameter_ptr_biases() {
         return this->neuron_biases;
     }
 
-    std::vector<double> *NeuralNetwork::get_parameter_ptr_weights() {
-        return this->connection_weights;
+    std::vector<double>* NeuralNetwork::get_parameter_ptr_weights() {
+        return &this->connection_weights;
     }
 
 //    size_t NeuralNetwork::add_new_connection_to_list(ConnectionFunctionGeneral *con) {
@@ -1236,7 +1237,7 @@ namespace lib4neuro {
         this->neuron_potentials = new ::std::vector<double>(0);
         this->neuron_bias_indices = new ::std::vector<int>(0);
 
-        this->connection_weights = new ::std::vector<double>(0);
+//        this->connection_weights = new ::std::vector<double>(0);
         this->connection_list = ::std::vector<std::shared_ptr<ConnectionFunctionGeneral>>(0);
         this->inward_adjacency = new ::std::vector<std::vector<std::pair<size_t, size_t>> *>(0);
         this->outward_adjacency = new ::std::vector<std::vector<std::pair<size_t, size_t>> *>(0);
diff --git a/src/Network/NeuralNetwork.h b/src/Network/NeuralNetwork.h
index f7cc7a16..9f69eb33 100644
--- a/src/Network/NeuralNetwork.h
+++ b/src/Network/NeuralNetwork.h
@@ -70,7 +70,7 @@ namespace lib4neuro {
         /**
          *
          */
-        std::vector<double> *connection_weights = nullptr;
+        std::vector<double> connection_weights;
 
         /**
          *
-- 
GitLab