Commit e369eac2 authored by Martin Beseda

WIP: Fixed memory leak

parent 0e232fed
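The underlying fix, visible across all of the hunks below, is a change of ownership: the heap-allocated connection_weights member (a raw std::vector<double>* created with new) becomes a plain std::vector<double> value, so its storage is released by the member's own destructor instead of by a manual delete that was easy to miss. A minimal sketch of the two layouts, using simplified stand-in classes rather than the real NeuralNetwork:

    #include <vector>

    // Leak-prone layout: the raw pointer owns heap memory, and every
    // destruction/reassignment path must remember the matching delete.
    class WeightsAsPointer {
    public:
        WeightsAsPointer() : connection_weights(new std::vector<double>(0)) {}
        ~WeightsAsPointer() { delete connection_weights; }  // omit this and every instance leaks
    private:
        std::vector<double>* connection_weights = nullptr;  // naive copies would also double-delete
    };

    // Layout after this commit: the vector is a value member, so
    // ~std::vector<double>() runs automatically when the owner is destroyed.
    class WeightsAsValue {
    public:
        std::vector<double>* get_weights() { return &connection_weights; }  // non-owning pointer
    private:
        std::vector<double> connection_weights;  // default-constructed: already an empty vector
    };

The remaining hunks are the mechanical consequences of that change: -> becomes ., null checks become empty() checks, and the getter returns the member's address.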
@@ -21,8 +21,8 @@ namespace lib4neuro {
  this->neuron_biases = new ::std::vector<double>(0);
  this->neuron_bias_indices = new ::std::vector<int>(0);
- this->connection_weights = new ::std::vector<double>(0);
- this->connection_list = ::std::vector<std::shared_ptr<ConnectionFunctionGeneral>>(0);
+ // this->connection_weights = new ::std::vector<double>(0);
+ // this->connection_list = ::std::vector<std::shared_ptr<ConnectionFunctionGeneral>>(0);
  this->inward_adjacency = new ::std::vector<std::vector<std::pair<size_t, size_t>> *>(0);
  this->outward_adjacency = new ::std::vector<std::vector<std::pair<size_t, size_t>> *>(0);
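With the member held by value, the commented-out allocations are redundant rather than missing: a default-constructed std::vector<double> (and likewise the connection_list vector) is already empty, which is exactly what the old new ::std::vector<double>(0) produced. A trivial check of that assumption:

    #include <cassert>
    #include <vector>

    int main() {
        std::vector<double> connection_weights;  // default construction, no 'new' needed
        assert(connection_weights.empty() && connection_weights.size() == 0);
        return 0;
    }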
@@ -85,10 +85,10 @@ namespace lib4neuro {
  this->input_neuron_indices = nullptr;
  }
- if (this->connection_weights && this->delete_weights) {
- delete this->connection_weights;
- this->connection_weights = nullptr;
- }
+ // if (this->connection_weights && this->delete_weights) {
+ // delete this->connection_weights;
+ // this->connection_weights = nullptr;
+ // }
  if (this->neuron_biases && this->delete_biases) {
  delete this->neuron_biases;
@@ -462,9 +462,9 @@ namespace lib4neuro {
  size_t ci = c.second;
  this->neuron_potentials->at(ti) +=
- this->connection_list.at(ci)->eval(*this->connection_weights) * potential;
+ this->connection_list.at(ci)->eval(this->connection_weights) * potential;
- std::cout << " adding input to neuron " << ti << " += " << this->connection_list.at(ci)->eval(*this->connection_weights) << "*" << potential << std::endl;
+ std::cout << " adding input to neuron " << ti << " += " << this->connection_list.at(ci)->eval(this->connection_weights) << "*" << potential << std::endl;
  }
  }
  }
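Dropping the dereference does not change what eval() receives: *this->connection_weights and this->connection_weights denote the same vector lvalue once the member is stored by value, so only the call site changes. A hedged sketch of the assumed calling convention (ConnectionSketch is a hypothetical stand-in, not the real ConnectionFunctionGeneral):

    #include <cstddef>
    #include <vector>

    // Hypothetical connection functor: eval() is assumed to take the shared
    // parameter vector by reference and read its own weight by index.
    struct ConnectionSketch {
        std::size_t weight_idx;
        double eval(const std::vector<double>& parameter_space) const {
            return parameter_space.at(weight_idx);
        }
    };

    double demo() {
        std::vector<double> connection_weights{0.5, -1.25};
        ConnectionSketch c{1};
        return c.eval(connection_weights);  // same lvalue as '*ptr' before, no dereference needed
    }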
@@ -494,10 +494,10 @@ namespace lib4neuro {
  // con_weight_u1u2 = new ConnectionFunctionIdentity();
  } else {
  if (sct == SIMPLE_CONNECTION_TYPE::NEXT_WEIGHT) {
- weight_idx = this->connection_weights->size();
- this->connection_weights->resize(weight_idx + 1);
+ weight_idx = this->connection_weights.size();
+ this->connection_weights.resize(weight_idx + 1);
  } else if (sct == SIMPLE_CONNECTION_TYPE::EXISTING_WEIGHT) {
- if (weight_idx >= this->connection_weights->size()) {
+ if (weight_idx >= this->connection_weights.size()) {
  ::std::cerr << "The supplied connection weight index is too large!\n" << ::std::endl;
  }
  }
@@ -544,20 +544,21 @@ namespace lib4neuro {
  void NeuralNetwork::copy_parameter_space(std::vector<double> *parameters) {
  if (parameters != nullptr) {
- for (unsigned int i = 0; i < this->connection_weights->size(); ++i) {
- (*this->connection_weights).at(i) = (*parameters).at(i);
+ for (unsigned int i = 0; i < this->connection_weights.size(); ++i) {
+ this->connection_weights.at(i) = (*parameters).at(i);
  }
  for (unsigned int i = 0; i < this->neuron_biases->size(); ++i) {
- (*this->neuron_biases).at(i) = (*parameters).at(i + this->connection_weights->size());
+ (*this->neuron_biases).at(i) = (*parameters).at(i + this->connection_weights.size());
  }
  }
  }
  void NeuralNetwork::set_parameter_space_pointers(NeuralNetwork &parent_network) {
- if (this->connection_weights) {
- delete connection_weights;
+ if (!this->connection_weights.empty()) {
+ // delete connection_weights;
+ this->connection_weights.clear();
  }
  if (this->neuron_biases) {
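In set_parameter_space_pointers the old code deleted the locally owned vector before adopting the parent network's parameters; with a value member there is nothing to delete, so the commit clears it instead. One subtlety: clear() destroys the elements but typically keeps the allocated capacity, so if the goal is to actually give the memory back, shrink_to_fit() or the swap idiom would be needed. A small sketch (the helper name is illustrative only, not part of lib4neuro):

    #include <vector>

    void release_local_weights(std::vector<double>& connection_weights) {
        connection_weights.clear();          // destroys elements; capacity is usually retained
        connection_weights.shrink_to_fit();  // non-binding request to give the capacity back
        // Guaranteed release, if ever needed:
        // std::vector<double>().swap(connection_weights);
    }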
@@ -620,7 +621,7 @@ namespace lib4neuro {
  size_t ci = c.second;
  this->neuron_potentials->at(ti) +=
- this->connection_list.at(ci)->eval(*this->connection_weights) * potential;
+ this->connection_list.at(ci)->eval(this->connection_weights) * potential;
  }
  }
  }
@@ -689,7 +690,7 @@ namespace lib4neuro {
  size_t ci = c.second;
  neuron_potential_t = this->neurons->at(ti)->get_last_activation_value( );
- connection_weight = this->connection_list.at(ci)->eval(*this->connection_weights);
+ connection_weight = this->connection_list.at(ci)->eval(this->connection_weights);
  this->connection_list.at(ci)->eval_partial_derivative(*this->get_parameter_ptr_weights(),
  gradient,
@@ -766,7 +767,7 @@ namespace lib4neuro {
  size_t ci = c.second;
  neuron_activation_t = this->neurons->at(ti)->get_last_activation_value( );
- connection_weight = this->connection_list.at(ci)->eval(*this->connection_weights);
+ connection_weight = this->connection_list.at(ci)->eval(this->connection_weights);
  std::cout << " [backpropagation] value ("<<ti<< "): " << neuron_activation_t << ", scaling: " << scaling_backprog[neuron_idx] << std::endl;
@@ -792,12 +793,12 @@ namespace lib4neuro {
  boost::random::mt19937 gen(std::time(0));
  // Init weight guess ("optimal" for logistic activation functions)
- double r = 4 * sqrt(6. / (this->connection_weights->size()));
+ double r = 4 * sqrt(6. / (this->connection_weights.size()));
  boost::random::uniform_real_distribution<> dist(-r, r);
- for (size_t i = 0; i < this->connection_weights->size(); i++) {
- this->connection_weights->at(i) = dist(gen);
+ for (size_t i = 0; i < this->connection_weights.size(); i++) {
+ this->connection_weights.at(i) = dist(gen);
  }
  }
@@ -825,7 +826,7 @@ namespace lib4neuro {
  void NeuralNetwork::scale_weights(double alpha) {
  for(size_t i = 0; i < this->get_n_weights(); ++i){
- this->connection_weights->at( i ) *= alpha;
+ this->connection_weights.at( i ) *= alpha;
  }
  }
@@ -843,7 +844,7 @@ namespace lib4neuro {
  }
  size_t NeuralNetwork::get_n_weights() {
- return this->connection_weights->size();
+ return this->connection_weights.size();
  }
  size_t NeuralNetwork::get_n_biases() {
@@ -878,11 +879,11 @@ namespace lib4neuro {
  void NeuralNetwork::write_weights() {
  std::cout << "Connection weights: ";
- if (this->connection_weights) {
- for (size_t i = 0; i < this->connection_weights->size() - 1; ++i) {
- std::cout << this->connection_weights->at(i) << ", ";
+ if (!this->connection_weights.empty()) {
+ for (size_t i = 0; i < this->connection_weights.size() - 1; ++i) {
+ std::cout << this->connection_weights.at(i) << ", ";
  }
- std::cout << this->connection_weights->at(this->connection_weights->size() - 1) << std::endl;
+ std::cout << this->connection_weights.at(this->connection_weights.size() - 1) << std::endl;
  }
  }
@@ -895,22 +896,22 @@ namespace lib4neuro {
  ofs << "Connection weights: ";
- if (this->connection_weights) {
- for (size_t i = 0; i < this->connection_weights->size() - 1; ++i) {
- ofs << this->connection_weights->at(i) << ", ";
+ if (!this->connection_weights.empty()) {
+ for (size_t i = 0; i < this->connection_weights.size() - 1; ++i) {
+ ofs << this->connection_weights.at(i) << ", ";
  }
- ofs << this->connection_weights->at(this->connection_weights->size() - 1) << std::endl;
+ ofs << this->connection_weights.at(this->connection_weights.size() - 1) << std::endl;
  }
  }
  void NeuralNetwork::write_weights(std::ofstream* file_path) {
  *file_path << "Connection weights: ";
- if (this->connection_weights) {
- for (size_t i = 0; i < this->connection_weights->size() - 1; ++i) {
- *file_path << this->connection_weights->at(i) << ", ";
+ if (!this->connection_weights.empty()) {
+ for (size_t i = 0; i < this->connection_weights.size() - 1; ++i) {
+ *file_path << this->connection_weights.at(i) << ", ";
  }
- *file_path << this->connection_weights->at(this->connection_weights->size() - 1) << std::endl;
+ *file_path << this->connection_weights.at(this->connection_weights.size() - 1) << std::endl;
  }
  }
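The new guard in the three write_weights overloads is a behavioural improvement, not just a syntax change: the old if (this->connection_weights) only tested the pointer for null, so a valid but empty vector would still reach the size() - 1 arithmetic, where the unsigned subtraction wraps around and at(0) throws std::out_of_range. Checking !connection_weights.empty() skips the printing entirely in that case. A tiny illustration of the wrap-around:

    #include <iostream>
    #include <vector>

    int main() {
        std::vector<double> connection_weights;  // valid but empty
        // size() is unsigned, so size() - 1 wraps instead of being -1:
        std::cout << connection_weights.size() - 1 << std::endl;  // prints SIZE_MAX
        // connection_weights.at(0);  // would throw std::out_of_range
        return 0;
    }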
@@ -957,7 +958,7 @@ namespace lib4neuro {
  ::std::cout << std::flush
  << "Number of neurons: " << this->neurons->size() << ::std::endl
  << "Number of connections: " << this->connection_list.size() << ::std::endl
- << "Number of active weights: " << this->connection_weights->size() << ::std::endl
+ << "Number of active weights: " << this->connection_weights.size() << ::std::endl
  << "Number of active biases: " << this->neuron_biases->size() << ::std::endl;
  if(this->normalization_strategy) {
@@ -979,7 +980,7 @@ namespace lib4neuro {
  ofs << "Number of neurons: " << this->neurons->size() << ::std::endl
  << "Number of connections: " << this->connection_list.size() << ::std::endl
- << "Number of active weights: " << this->connection_weights->size() << ::std::endl
+ << "Number of active weights: " << this->connection_weights.size() << ::std::endl
  << "Number of active biases: " << this->neuron_biases->size() << ::std::endl;
  if(this->normalization_strategy) {
@@ -996,7 +997,7 @@ namespace lib4neuro {
  void NeuralNetwork::write_stats(std::ofstream* file_path) {
  *file_path << "Number of neurons: " << this->neurons->size() << ::std::endl
  << "Number of connections: " << this->connection_list.size() << ::std::endl
- << "Number of active weights: " << this->connection_weights->size() << ::std::endl
+ << "Number of active weights: " << this->connection_weights.size() << ::std::endl
  << "Number of active biases: " << this->neuron_biases->size() << ::std::endl;
  if(this->normalization_strategy) {
@@ -1008,12 +1009,12 @@ namespace lib4neuro {
  }
  }
- std::vector<double> *NeuralNetwork::get_parameter_ptr_biases() {
+ std::vector<double>* NeuralNetwork::get_parameter_ptr_biases() {
  return this->neuron_biases;
  }
- std::vector<double> *NeuralNetwork::get_parameter_ptr_weights() {
- return this->connection_weights;
+ std::vector<double>* NeuralNetwork::get_parameter_ptr_weights() {
+ return &this->connection_weights;
  }
  // size_t NeuralNetwork::add_new_connection_to_list(ConnectionFunctionGeneral *con) {
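get_parameter_ptr_weights() keeps its std::vector<double>* return type but now hands out the address of the value member. That pointer is non-owning and stays valid for the lifetime of the network (the vector object itself never moves, even when its internal buffer reallocates), so existing callers such as eval_partial_derivative(*this->get_parameter_ptr_weights(), ...) keep working; they simply must never delete it. A hedged usage sketch with a simplified stand-in class (not the real lib4neuro API):

    #include <vector>

    class NetworkSketch {
    public:
        std::vector<double>* get_parameter_ptr_weights() { return &connection_weights; }
    private:
        std::vector<double> connection_weights{0.1, 0.2, 0.3};
    };

    double sum_weights(NetworkSketch& net) {
        std::vector<double>* w = net.get_parameter_ptr_weights();  // borrowed, never delete
        double sum = 0.0;
        for (double v : *w) sum += v;
        return sum;
    }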
@@ -1236,7 +1237,7 @@ namespace lib4neuro {
  this->neuron_potentials = new ::std::vector<double>(0);
  this->neuron_bias_indices = new ::std::vector<int>(0);
- this->connection_weights = new ::std::vector<double>(0);
+ // this->connection_weights = new ::std::vector<double>(0);
  this->connection_list = ::std::vector<std::shared_ptr<ConnectionFunctionGeneral>>(0);
  this->inward_adjacency = new ::std::vector<std::vector<std::pair<size_t, size_t>> *>(0);
  this->outward_adjacency = new ::std::vector<std::vector<std::pair<size_t, size_t>> *>(0);
@@ -70,7 +70,7 @@ namespace lib4neuro {
  /**
  *
  */
- std::vector<double> *connection_weights = nullptr;
+ std::vector<double> connection_weights; //= nullptr;
  /**
  *
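In the header, the member declaration itself changes type. The trailing //= nullptr; is a leftover from the pointer version and could be dropped entirely, since a default-constructed std::vector<double> is already a well-defined empty vector. A sketch of how the declaration could read (class name and comment are illustrative, not the real header):

    #include <vector>

    class NeuralNetworkSketch {
    private:
        /**
         * Connection weights, owned by value; no "= nullptr" and no manual delete.
         */
        std::vector<double> connection_weights;
    };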