Skip to content
Snippets Groups Projects
Commit f00e368e authored by kra568's avatar kra568
Browse files

[ENH] added a possibility to alter the indexing of weights in the connection object

parent 53ea8bce
No related branches found
No related tags found
No related merge requests found
......@@ -9,8 +9,4 @@ namespace lib4neuro {
}
double Neuron::get_last_activation_value() {
return this->activation_val;
}
}
......@@ -57,12 +57,6 @@ namespace lib4neuro {
LIB4NEURO_API virtual double activate(double x,
double b) = 0;
/**
* returns the last value of the actual activation function output for this neuron
* @return
*/
LIB4NEURO_API virtual double get_last_activation_value();
}; /* end of Neuron class */
......
......@@ -16,8 +16,6 @@ namespace lib4neuro {
/**
 * Evaluates the activation function and caches the result in activation_val.
 *
 * NOTE(review): despite the class name, this computes (x + b)^2, not |x + b|;
 * the absolute-value variant is kept commented out below for reference.
 *
 * @param x weighted input of the neuron
 * @param b bias
 * @return (x + b)^2
 */
double NeuronAbsolute::activate(double x,
                                double b) {
    //this->activation_val = abs(x + b);
    // v * v instead of pow(v, 2): same value, avoids the general-purpose
    // transcendental pow() call for a fixed small integer exponent.
    const double v = x + b;
    this->activation_val = v * v;
    return this->activation_val;
}
......
......@@ -14,12 +14,10 @@ namespace lib4neuro {
double b) {
if (x >= b) {
this->activation_val = 1.0;
return 1.0;
} else {
this->activation_val = 0.0;
return 0.0;
}
return this->activation_val;
}
}
\ No newline at end of file
......@@ -17,12 +17,10 @@ namespace lib4neuro {
double b) {
if (x >= this->bias) {
this->activation_val = 1.0;
return 1.0;
} else {
this->activation_val = 0.0;
return 0.0;
}
return this->activation_val;
}
double NeuronBinaryBiased::activation_function_eval_derivative_bias(double x,
......
......@@ -20,8 +20,7 @@ namespace lib4neuro {
/**
 * Evaluates the activation function and caches the result in activation_val.
 * A constant neuron ignores both its input and its bias: the output is always
 * the fixed member parameter p.
 *
 * @param x weighted input of the neuron (unused)
 * @param b bias (unused)
 * @return the constant p
 */
double NeuronConstant::activate(double x,
                                double b) {
    // Removed an unreachable duplicate `return this->p;` that followed the
    // return below (leftover from a merge/diff).
    this->activation_val = this->p;
    return this->activation_val;
}
double NeuronConstant::activation_function_eval_derivative_bias(double x,
......
......@@ -17,8 +17,7 @@ namespace lib4neuro {
/**
 * Evaluates the activation function and caches the result in activation_val.
 * Passes x through unchanged when (x + bias) is positive, otherwise outputs 0.
 * Note: the neuron's own `bias` member is used in the gate, not the parameter b.
 *
 * @param x weighted input of the neuron
 * @param b bias (unused by the gate; the member bias is used instead)
 * @return x if x + this->bias > 0, else 0.0
 */
double NeuronFilter::activate(double x,
                              double b) {
    // Removed an unreachable duplicate return of the same expression that
    // followed the return below (leftover from a merge/diff).
    this->activation_val = ((x + this->bias) > 0) ? x : 0.0;
    return this->activation_val;
}
double NeuronFilter::activation_function_eval_derivative_bias(double x,
......
......@@ -13,8 +13,7 @@ namespace lib4neuro {
/**
 * Evaluates the activation function and caches the result in activation_val.
 * Linear (identity) unit: f(x) = x + b.
 *
 * @param x weighted input of the neuron
 * @param b bias
 * @return x + b
 */
double NeuronLinear::activate(double x,
                              double b) {
    // Removed an unreachable duplicate `return x + b;` that followed the
    // return below (leftover from a merge/diff).
    this->activation_val = x + b;
    return this->activation_val;
}
double NeuronLinear::activation_function_eval_derivative_bias(double x,
......
......@@ -17,15 +17,14 @@ namespace lib4neuro {
/**
 * Evaluates the activation function and caches the result in activation_val.
 * Saturated linear unit: clamps x + b into [0, saturation_point].
 *
 * @param x weighted input of the neuron
 * @param b bias
 * @return 0 if x + b < 0; saturation_point if x + b > saturation_point;
 *         otherwise x + b
 */
double NeuronLinearSaturated::activate(double x,
                                       double b) {
    // Each branch only assigns, with a single return at the end; the original
    // returned inside every branch, leaving a trailing unreachable return
    // (leftover from a merge/diff).
    if (x + b < 0) {
        this->activation_val = 0.0;
    } else if (x + b > this->saturation_point) {
        this->activation_val = this->saturation_point;
    } else {
        this->activation_val = x + b;
    }
    return this->activation_val;
}
double NeuronLinearSaturated::activation_function_eval_derivative_bias(double x,
......
......@@ -23,8 +23,7 @@ namespace lib4neuro {
b);
double denom = (eb + ex);
this->activation_val = (eb * ex * (eb - ex)) / (denom * denom * denom);
return this->activation_val;
return (eb * ex * (eb - ex)) / (denom * denom * denom);
}
double NeuronLogistic_d2::activation_function_eval_derivative_bias(double x,
......@@ -67,8 +66,7 @@ namespace lib4neuro {
double d = (eb / ex);
double denom = (d + 1);
this->activation_val = d / (denom * denom);
return this->activation_val;
return d / (denom * denom);
}
double NeuronLogistic_d1::activation_function_eval_derivative_bias(double x,
......
......@@ -13,8 +13,7 @@ namespace lib4neuro {
/**
 * Evaluates the activation function and caches the result in activation_val.
 * Rectified linear unit (ReLU): f(x) = max(0, x + b).
 *
 * @param x weighted input of the neuron
 * @param b bias
 * @return x + b if positive, else 0.0
 */
double NeuronRectifier::activate(double x,
                                 double b) {
    // Removed an unreachable duplicate return of the same expression that
    // followed the return below (leftover from a merge/diff).
    this->activation_val = (0 < x + b) ? x + b : 0.0;
    return this->activation_val;
}
double NeuronRectifier::activation_function_eval_derivative_bias(double x,
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment