Commit 4e564c9b authored by Michal Kravcenko's avatar Michal Kravcenko

added a new class implementing a solver for differential equations of order at most 2.

edited the first ode example to use the new DESolver class
parent cd300571
......@@ -13,7 +13,7 @@ add_library(4neuro SHARED
NetConnection/ConnectionWeightIdentity.cpp
LearningMethods/ParticleSwarm.cpp
DataSet/DataSet.cpp
ErrorFunction/ErrorFunctions.cpp Network/NeuralNetworkSum.cpp Network/NeuralNetworkSum.h Solvers/PDESolver.cpp Solvers/PDESolver.h Neuron/NeuronLogistic_d1.cpp Neuron/NeuronLogistic_d1.h Neuron/NeuronLogistic_d2.cpp Neuron/NeuronLogistic_d2.h)
ErrorFunction/ErrorFunctions.cpp Network/NeuralNetworkSum.cpp Network/NeuralNetworkSum.h Solvers/DESolver.cpp Solvers/DESolver.h)
target_link_libraries(4neuro boost_serialization)
......
......@@ -19,7 +19,7 @@ MSE::MSE(NeuralNetwork *net, DataSet *ds) {
double MSE::eval(double *weights) {
unsigned int dim_out = this->ds->get_output_dim();
unsigned int dim_in = this->ds->get_input_dim();
// unsigned int dim_in = this->ds->get_input_dim();
size_t n_elements = this->ds->get_n_elements();
double error = 0.0, val;
......@@ -51,13 +51,13 @@ double MSE::eval(double *weights) {
return error/n_elements;
}
MSE_SUM::MSE_SUM() {
ERROR_SUM::ERROR_SUM() {
this->summand = nullptr;
this->summand_coefficient = nullptr;
this->dimension = 0;
}
MSE_SUM::~MSE_SUM(){
ERROR_SUM::~ERROR_SUM(){
if( this->summand ){
delete this->summand;
}
......@@ -66,17 +66,17 @@ MSE_SUM::~MSE_SUM(){
}
}
double MSE_SUM::eval(double *weights) {
double ERROR_SUM::eval(double *weights) {
double output = 0.0;
for( int i = 0; i < this->summand->size(); ++i ){
for( unsigned int i = 0; i < this->summand->size(); ++i ){
output += this->summand->at( i )->eval( weights ) * this->summand_coefficient->at( i );
}
return output;
}
void MSE_SUM::add_error_function(ErrorFunction *F, double alpha) {
void ERROR_SUM::add_error_function(ErrorFunction *F, double alpha) {
if(!this->summand){
this->summand = new std::vector<ErrorFunction*>(0);
}
......@@ -92,7 +92,7 @@ void MSE_SUM::add_error_function(ErrorFunction *F, double alpha) {
}
}
size_t MSE_SUM::get_dimension() {
size_t ERROR_SUM::get_dimension() {
// if(!this->dimension) {
// size_t max = 0;
// for(auto e : *this->summand) {
......
......@@ -8,6 +8,10 @@
#include "../Network/NeuralNetwork.h"
#include "../DataSet/DataSet.h"
enum ErrorFunctionType{
ErrorFuncMSE
};
class ErrorFunction {
public:
......@@ -55,17 +59,17 @@ private:
DataSet* ds;
};
class MSE_SUM : public ErrorFunction{
class ERROR_SUM : public ErrorFunction{
public:
/**
*
*/
MSE_SUM();
ERROR_SUM();
/**
*
*/
~MSE_SUM();
~ERROR_SUM();
/**
*
......
......@@ -401,7 +401,7 @@ void NeuralNetwork::determine_inputs_outputs() {
}
void NeuralNetwork::set_weight_array(std::vector<double> *weight_ptr) {
if(this->connection_weights){
if( this->connection_weights && this->delete_weights ){
delete this->connection_weights;
}
this->connection_weights = weight_ptr;
......
......@@ -259,6 +259,7 @@ class IDifferentiable {
* @return
*/
virtual Neuron* get_derivative( );
}; /* end of IDifferentiable class */
#endif /* NEURON_H_ */
\ No newline at end of file
......@@ -5,6 +5,172 @@
#include "NeuronLogistic.h"
Neuron* NeuronLogistic_d2::get_copy( ){
    // Produces a fresh NeuronLogistic_d2 sharing only the (a, b) coefficients;
    // connections and state are NOT copied.
    return new NeuronLogistic_d2( this->activation_function_parameters[0],
                                  this->activation_function_parameters[1] );
}
// Constructs the neuron with coefficients a and b and empty connection lists.
// NOTE(review): raw `new` for the parameter array and edge vectors — presumably
// released by the Neuron base destructor; confirm to rule out a leak.
NeuronLogistic_d2::NeuronLogistic_d2(double a, double b) {
this->n_activation_function_parameters = 2;
this->activation_function_parameters = new double[2];
this->activation_function_parameters[0] = a;   // coefficient 'a'
this->activation_function_parameters[1] = b;   // coefficient 'b'
this->edges_in = new std::vector<Connection*>(0);
this->edges_out = new std::vector<Connection*>(0);
}
void NeuronLogistic_d2::activate( ) {
    // Evaluates
    //   [(1 + a) e^(b-x) (1 + e^(b - x))^(-1) - 1] a e^(b - x) (1 + e^(b - x))^(-1 - a)
    // at x = potential and stores the result in 'state'.
    double a = this->activation_function_parameters[0];
    double b = this->activation_function_parameters[1];
    double x = this->potential;

    // std::exp is both faster and more accurate than std::pow(E, ...)
    double ex  = std::exp(b - x);
    double ex2 = std::pow(ex + 1.0, -a - 1.0);
    double ex3 = 1.0 / (1.0 + ex);

    this->state = -a * ex * ex2 * (1.0 - (a + 1.0) * ex * ex3);
}
// Partial derivative of the activation function w.r.t. parameter 'a'
// (param_idx == 0) or 'b' (param_idx == 1); returns 0.0 for any other index.
double NeuronLogistic_d2::activation_function_eval_partial_derivative(int param_idx ) {
    double a = this->activation_function_parameters[0];
    double b = this->activation_function_parameters[1];
    double x = this->potential;

    if(param_idx == 0){
        //partial derivative according to parameter 'a'
        //(e^b (e^(b - x) + 1)^(-a) (a^2 (-e^b) log(e^(b - x) + 1) + a e^x log(e^(b - x) + 1) + 2 a e^b - e^x))/(e^b + e^x)^2
        double eb   = std::exp(b);
        double ex   = std::exp(x);
        double ebx  = std::exp(b - x);          // e^(b - x)
        double ebxa = std::pow(ebx + 1.0, -a);
        double lbx  = std::log(ebx + 1.0);

        return eb * ebxa * (a * lbx * ( a * (-eb) + ex ) + 2.0 * a * eb - ex) / ((eb + ex) * (eb + ex));
    }
    else if(param_idx == 1){
        //partial derivative according to parameter 'b'
        //-(a e^b (e^(b - x) + 1)^(-a) (a^2 e^(2 b) - 3 a e^(b + x) - e^(b + x) + e^(2 x)))/(e^b + e^x)^3
        double eb   = std::exp(b);
        double ex   = std::exp(x);
        double ebxa = std::pow(std::exp(b - x) + 1.0, -a);
        // FIX: the e^(b + x) terms of the documented formula were previously
        // computed with e^(b - x) (eb / ex) — corrected to eb * ex.
        double ebpx = eb * ex;                  // e^(b + x)

        return - a * eb * ebxa * (a * a * eb * eb - 3.0 * a * ebpx - ebpx + ex * ex) / ((eb + ex) * (eb + ex) * (eb + ex));
    }

    return 0.0;
}
// d/dx of the activation function:
// (a e^b (1 + e^(b - x))^(-a) (a^2 e^(2 b) + e^(2 x) - e^(b + x) - 3 a e^(b + x)))/(e^b + e^x)^3
double NeuronLogistic_d2::activation_function_eval_derivative() {
    double a = this->activation_function_parameters[0];
    double b = this->activation_function_parameters[1];
    double x = this->potential;

    double eb   = std::exp(b);
    double ex   = std::exp(x);
    double ebxa = std::pow(std::exp(b - x) + 1.0, -a);
    // FIX: the e^(b + x) terms of the documented formula were previously
    // computed with e^(b - x) (eb / ex) — corrected to eb * ex.
    double ebpx = eb * ex;                      // e^(b + x)

    return a * eb * ebxa * (a * a * eb * eb - 3.0 * a * ebpx - ebpx + ex * ex) / ((eb + ex) * (eb + ex) * (eb + ex));
}
Neuron* NeuronLogistic_d1::get_copy( ){
    // Produces a fresh NeuronLogistic_d1 sharing only the (a, b) coefficients;
    // connections and state are NOT copied.
    return new NeuronLogistic_d1( this->activation_function_parameters[0],
                                  this->activation_function_parameters[1] );
}
// Constructs the neuron with coefficients a and b and empty connection lists.
// NOTE(review): raw `new` for the parameter array and edge vectors — presumably
// released by the Neuron base destructor; confirm to rule out a leak.
NeuronLogistic_d1::NeuronLogistic_d1(double a, double b) {
this->n_activation_function_parameters = 2;
this->activation_function_parameters = new double[2];
this->activation_function_parameters[0] = a;   // coefficient 'a'
this->activation_function_parameters[1] = b;   // coefficient 'b'
this->edges_in = new std::vector<Connection*>(0);
this->edges_out = new std::vector<Connection*>(0);
}
void NeuronLogistic_d1::activate( ) {
    // Evaluates a e^(b - x) (1 + e^(b - x))^(-1 - a) at x = potential
    // and stores the result in 'state'.
    double a = this->activation_function_parameters[0];
    double b = this->activation_function_parameters[1];
    double x = this->potential;

    // std::exp is both faster and more accurate than std::pow(E, ...)
    double ex = std::exp(b - x);

    this->state = a * ex * std::pow(1.0 + ex, -a - 1.0);
}
// Partial derivative of the activation function w.r.t. parameter 'a'
// (param_idx == 0) or 'b' (param_idx == 1); returns 0.0 for any other index.
double NeuronLogistic_d1::activation_function_eval_partial_derivative(int param_idx ) {
    double a = this->activation_function_parameters[0];
    double b = this->activation_function_parameters[1];
    double x = this->potential;

    if(param_idx == 0){
        //partial derivative according to parameter 'a'
        //e^(b - x) (1 + e^(b - x))^(-1 - a)[1 - a log(1 + e^(b - x))]
        double ex  = std::exp(b - x);           // std::exp replaces std::pow(E, ...)
        double ex2 = std::pow(1.0 + ex, -1.0 - a);

        return ex * ex2 * (1.0 - a * std::log(1.0 + ex));
    }
    else if(param_idx == 1){
        //partial derivative according to parameter 'b'
        //[(-1 - a) e^(b-x) (1 + e^(b - x))^(-1) + 1] * a e^(b - x) (1 + e^(b - x))^(-1 - a)
        double ex  = std::exp(b - x);
        double ex2 = std::pow(ex + 1.0, -a - 1.0);
        double ex3 = 1.0 / (1.0 + ex);

        return a * ex * ex2 * (1.0 - (a + 1.0) * ex * ex3);
    }

    return 0.0;
}
// d/dx of a e^(b - x) (1 + e^(b - x))^(-1 - a):
// [(1 + a) e^(b-x) (1 + e^(b - x))^(-1) - 1] a e^(b - x) (1 + e^(b - x))^(-1 - a)
double NeuronLogistic_d1::activation_function_eval_derivative() {
    double a = this->activation_function_parameters[0];
    double b = this->activation_function_parameters[1];
    double x = this->potential;

    // std::exp is both faster and more accurate than std::pow(E, ...)
    double ex  = std::exp(b - x);
    double ex2 = std::pow(ex + 1.0, -a - 1.0);
    double ex3 = 1.0 / (1.0 + ex);

    return -a * ex * ex2 * (1.0 - (a + 1.0) * ex * ex3);
}
NeuronLogistic_d2* NeuronLogistic_d1::get_derivative() {
    // The derivative of this neuron's activation is represented by a
    // NeuronLogistic_d2 constructed with the same (a, b) coefficients.
    return new NeuronLogistic_d2( this->activation_function_parameters[0],
                                  this->activation_function_parameters[1] );
}
Neuron* NeuronLogistic::get_copy( ){
NeuronLogistic* output = new NeuronLogistic( this->activation_function_parameters[0], this->activation_function_parameters[1]);
......@@ -77,7 +243,8 @@ double NeuronLogistic::activation_function_eval_derivative( ) {
}
Neuron* NeuronLogistic::get_derivative() {
NeuronLogistic_d1* NeuronLogistic::get_derivative() {
NeuronLogistic_d1 *output = nullptr;
double a = this->activation_function_parameters[0];
double b = this->activation_function_parameters[1];
......@@ -86,4 +253,5 @@ Neuron* NeuronLogistic::get_derivative() {
output->set_potential( this->potential );
return output;
}
\ No newline at end of file
......@@ -12,9 +12,100 @@
#include <cmath>
#include "Neuron.h"
#include "NeuronLogistic_d1.h"
#include "../constants.h"
/**
 * Neuron whose activation function is the second derivative of the
 * NeuronLogistic activation (used when assembling networks that model
 * derivatives of other networks).
 */
class NeuronLogistic_d2:public Neuron, public IDifferentiable {
friend class boost::serialization::access;
protected:
template<class Archive>
void serialize(Archive & ar, const unsigned int version){
//TODO separate implementation to NeuronLogistic_d2.cpp!
ar & boost::serialization::base_object<Neuron>(*this);
};
public:
/**
 * Returns a new NeuronLogistic_d2 with the same (a, b) coefficients;
 * connections and state are not copied.
 */
Neuron* get_copy( );
/**
 * Constructs the object of the Logistic neuron with activation function
 * f(x) = [(1 + a) e^(b-x) (1 + e^(b - x))^(-1) - 1]a e^(b - x) (1 + e^(b - x))^(-1 - a)
 * @param[in] a First coefficient, stored in activation_function_parameters[0]
 * @param[in] b Second coefficient, stored in activation_function_parameters[1]
 */
explicit NeuronLogistic_d2(double a = 0.0, double b = 0.0);
/**
 * Evaluates '[(1 + a) e^(b-x) (1 + e^(b - x))^(-1) - 1]a e^(b - x) (1 + e^(b - x))^(-1 - a)'
 * and stores the result into the 'state' property
 */
void activate( ) override;
/**
 * Calculates the partial derivative of the activation function
 * f(x) = [(1 + a) e^(b-x) (1 + e^(b - x))^(-1) - 1]a e^(b - x) (1 + e^(b - x))^(-1 - a)
 * @param[in] param_idx Index of the parameter to calculate derivative of
 * @return Partial derivative of the activation function according to the
 * 'param_idx'-th parameter.
 */
double activation_function_eval_partial_derivative(int param_idx) override;
/**
 * Calculates d/dx of [(1 + a) e^(b-x) (1 + e^(b - x))^(-1) - 1]a e^(b - x) (1 + e^(b - x))^(-1 - a)
 * @return (a e^b (1 + e^(b - x))^(-a) (a^2 e^(2 b) + e^(2 x) - e^(b + x) - 3 a e^(b + x)))/(e^b + e^x)^3
 */
double activation_function_eval_derivative( ) override;
};
/**
 * Neuron whose activation function is the first derivative of the
 * NeuronLogistic activation; its own derivative is NeuronLogistic_d2.
 */
class NeuronLogistic_d1:public Neuron, public IDifferentiable {
friend class boost::serialization::access;
protected:
template<class Archive>
void serialize(Archive & ar, const unsigned int version){
//TODO separate implementation to NeuronLogistic_d1.cpp!
ar & boost::serialization::base_object<Neuron>(*this);
};
public:
/**
 * Returns a new NeuronLogistic_d1 with the same (a, b) coefficients;
 * connections and state are not copied.
 */
Neuron* get_copy( );
/**
 * Constructs the object of the Logistic neuron with activation function
 * f(x) = a*e^(b - x)*(1 + e^(b - x))^(-1 - a)
 * @param[in] a First coefficient, stored in activation_function_parameters[0]
 * @param[in] b Second coefficient, stored in activation_function_parameters[1]
 */
explicit NeuronLogistic_d1(double a = 0.0, double b = 0.0);
/**
 * Evaluates 'a*e^(b - x)*(1 + e^(b - x))^(-1 - a)' and stores the result into the 'state' property
 */
void activate( ) override;
/**
 * Calculates the partial derivative of the activation function
 * f(x) = a*e^(b - x)*(1 + e^(b - x))^(-1 - a)
 * @param[in] param_idx Index of the parameter to calculate derivative of
 * @return Partial derivative of the activation function according to the
 * 'param_idx'-th parameter.
 */
double activation_function_eval_partial_derivative(int param_idx) override;
/**
 * Calculates d/dx of [a*e^(b - x)*(1 + e^(b - x))^(-1 - a)]
 * @return [[(1 + a) e^(b-x) (1 + e^(b - x))^(-1) - 1]a e^(b - x) (1 + e^(b - x))^(-1 - a)]
 */
double activation_function_eval_derivative( ) override;
/**
 * Returns a pointer to a NeuronLogistic_d2 with the same coefficients,
 * whose activation function is the derivative of this neuron's activation
 * @return newly allocated neuron; caller takes ownership
 */
NeuronLogistic_d2* get_derivative() override;
};
class NeuronLogistic:public Neuron, public IDifferentiable {
friend class boost::serialization::access;
......@@ -59,7 +150,7 @@ public:
* Returns a pointer to a Neuron with derivative as its activation function
* @return
*/
Neuron* get_derivative() override;
NeuronLogistic_d1* get_derivative() override;
};
......
/**
* DESCRIPTION OF THE FILE
*
* @author Michal Kravčenko
* @date 22.7.18 -
*/
#include "NeuronLogistic_d1.h"
Neuron* NeuronLogistic_d1::get_copy( ){
    // Produces a fresh NeuronLogistic_d1 sharing only the (a, b) coefficients;
    // connections and state are NOT copied.
    return new NeuronLogistic_d1( this->activation_function_parameters[0],
                                  this->activation_function_parameters[1] );
}
// Constructs the neuron with coefficients a and b and empty connection lists.
// NOTE(review): raw `new` for the parameter array and edge vectors — presumably
// released by the Neuron base destructor; confirm to rule out a leak.
NeuronLogistic_d1::NeuronLogistic_d1(double a, double b) {
this->n_activation_function_parameters = 2;
this->activation_function_parameters = new double[2];
this->activation_function_parameters[0] = a;   // coefficient 'a'
this->activation_function_parameters[1] = b;   // coefficient 'b'
this->edges_in = new std::vector<Connection*>(0);
this->edges_out = new std::vector<Connection*>(0);
}
void NeuronLogistic_d1::activate( ) {
    // Evaluates a e^(b - x) (1 + e^(b - x))^(-1 - a) at x = potential
    // and stores the result in 'state'.
    double a = this->activation_function_parameters[0];
    double b = this->activation_function_parameters[1];
    double x = this->potential;

    // std::exp is both faster and more accurate than std::pow(E, ...)
    double ex = std::exp(b - x);

    this->state = a * ex * std::pow(1.0 + ex, -a - 1.0);
}
// Partial derivative of the activation function w.r.t. parameter 'a'
// (param_idx == 0) or 'b' (param_idx == 1); returns 0.0 for any other index.
double NeuronLogistic_d1::activation_function_eval_partial_derivative(int param_idx ) {
    double a = this->activation_function_parameters[0];
    double b = this->activation_function_parameters[1];
    double x = this->potential;

    if(param_idx == 0){
        //partial derivative according to parameter 'a'
        //e^(b - x) (1 + e^(b - x))^(-1 - a)[1 - a log(1 + e^(b - x))]
        double ex  = std::exp(b - x);           // std::exp replaces std::pow(E, ...)
        double ex2 = std::pow(1.0 + ex, -1.0 - a);

        return ex * ex2 * (1.0 - a * std::log(1.0 + ex));
    }
    else if(param_idx == 1){
        //partial derivative according to parameter 'b'
        //[(-1 - a) e^(b-x) (1 + e^(b - x))^(-1) + 1] * a e^(b - x) (1 + e^(b - x))^(-1 - a)
        double ex  = std::exp(b - x);
        double ex2 = std::pow(ex + 1.0, -a - 1.0);
        double ex3 = 1.0 / (1.0 + ex);

        return a * ex * ex2 * (1.0 - (a + 1.0) * ex * ex3);
    }

    return 0.0;
}
// d/dx of a e^(b - x) (1 + e^(b - x))^(-1 - a):
// [(1 + a) e^(b-x) (1 + e^(b - x))^(-1) - 1] a e^(b - x) (1 + e^(b - x))^(-1 - a)
double NeuronLogistic_d1::activation_function_eval_derivative() {
    double a = this->activation_function_parameters[0];
    double b = this->activation_function_parameters[1];
    double x = this->potential;

    // std::exp is both faster and more accurate than std::pow(E, ...)
    double ex  = std::exp(b - x);
    double ex2 = std::pow(ex + 1.0, -a - 1.0);
    double ex3 = 1.0 / (1.0 + ex);

    return -a * ex * ex2 * (1.0 - (a + 1.0) * ex * ex3);
}
Neuron* NeuronLogistic_d1::get_derivative() {
    // The derivative of this neuron's activation is represented by a
    // NeuronLogistic_d2 constructed with the same (a, b) coefficients.
    return new NeuronLogistic_d2( this->activation_function_parameters[0],
                                  this->activation_function_parameters[1] );
}
\ No newline at end of file
/**
* DESCRIPTION OF THE FILE
*
* @author Michal Kravčenko
* @date 22.7.18 -
*/
#ifndef INC_4NEURO_NEURONLOGISTIC_D1_H
#define INC_4NEURO_NEURONLOGISTIC_D1_H
#include <cmath>
#include "Neuron.h"
#include "NeuronLogistic_d2.h"
#include "../constants.h"
/**
 * Neuron whose activation function is the first derivative of the
 * NeuronLogistic activation; its own derivative is NeuronLogistic_d2.
 */
class NeuronLogistic_d1:public Neuron, public IDifferentiable {
friend class boost::serialization::access;
protected:
template<class Archive>
void serialize(Archive & ar, const unsigned int version){
//TODO separate implementation to NeuronLogistic_d1.cpp!
ar & boost::serialization::base_object<Neuron>(*this);
};
public:
/**
 * Returns a new NeuronLogistic_d1 with the same (a, b) coefficients;
 * connections and state are not copied.
 */
Neuron* get_copy( );
/**
 * Constructs the object of the Logistic neuron with activation function
 * f(x) = a*e^(b - x)*(1 + e^(b - x))^(-1 - a)
 * @param[in] a First coefficient, stored in activation_function_parameters[0]
 * @param[in] b Second coefficient, stored in activation_function_parameters[1]
 */
explicit NeuronLogistic_d1(double a = 0.0, double b = 0.0);
/**
 * Evaluates 'a*e^(b - x)*(1 + e^(b - x))^(-1 - a)' and stores the result into the 'state' property
 */
void activate( ) override;
/**
 * Calculates the partial derivative of the activation function
 * f(x) = a*e^(b - x)*(1 + e^(b - x))^(-1 - a)
 * @param[in] param_idx Index of the parameter to calculate derivative of
 * @return Partial derivative of the activation function according to the
 * 'param_idx'-th parameter.
 */
double activation_function_eval_partial_derivative(int param_idx) override;
/**
 * Calculates d/dx of [a*e^(b - x)*(1 + e^(b - x))^(-1 - a)]
 * @return [[(1 + a) e^(b-x) (1 + e^(b - x))^(-1) - 1]a e^(b - x) (1 + e^(b - x))^(-1 - a)]
 */
double activation_function_eval_derivative( ) override;
/**
 * Returns a pointer to a Neuron (a NeuronLogistic_d2 with the same coefficients)
 * whose activation function is the derivative of this neuron's activation
 * @return newly allocated neuron; caller takes ownership
 */
Neuron* get_derivative() override;
};
#endif //INC_4NEURO_NEURONLOGISTIC_D1_H
/**
* DESCRIPTION OF THE FILE
*
* @author Michal Kravčenko
* @date 22.7.18 -
*/
#include "NeuronLogistic_d2.h"
Neuron* NeuronLogistic_d2::get_copy( ){
    // Produces a fresh NeuronLogistic_d2 sharing only the (a, b) coefficients;
    // connections and state are NOT copied.
    return new NeuronLogistic_d2( this->activation_function_parameters[0],
                                  this->activation_function_parameters[1] );
}
// Constructs the neuron with coefficients a and b and empty connection lists.
// NOTE(review): raw `new` for the parameter array and edge vectors — presumably
// released by the Neuron base destructor; confirm to rule out a leak.
NeuronLogistic_d2::NeuronLogistic_d2(double a, double b) {
this->n_activation_function_parameters = 2;
this->activation_function_parameters = new double[2];
this->activation_function_parameters[0] = a;   // coefficient 'a'
this->activation_function_parameters[1] = b;   // coefficient 'b'
this->edges_in = new std::vector<Connection*>(0);
this->edges_out = new std::vector<Connection*>(0);
}
void NeuronLogistic_d2::activate( ) {
    // Evaluates
    //   [(1 + a) e^(b-x) (1 + e^(b - x))^(-1) - 1] a e^(b - x) (1 + e^(b - x))^(-1 - a)
    // at x = potential and stores the result in 'state'.
    double a = this->activation_function_parameters[0];
    double b = this->activation_function_parameters[1];
    double x = this->potential;

    // std::exp is both faster and more accurate than std::pow(E, ...)
    double ex  = std::exp(b - x);
    double ex2 = std::pow(ex + 1.0, -a - 1.0);
    double ex3 = 1.0 / (1.0 + ex);

    this->state = -a * ex * ex2 * (1.0 - (a + 1.0) * ex * ex3);
}
// Partial derivative of the activation function w.r.t. parameter 'a'
// (param_idx == 0) or 'b' (param_idx == 1); returns 0.0 for any other index.
double NeuronLogistic_d2::activation_function_eval_partial_derivative(int param_idx ) {
    double a = this->activation_function_parameters[0];
    double b = this->activation_function_parameters[1];
    double x = this->potential;

    if(param_idx == 0){
        //partial derivative according to parameter 'a'
        //(e^b (e^(b - x) + 1)^(-a) (a^2 (-e^b) log(e^(b - x) + 1) + a e^x log(e^(b - x) + 1) + 2 a e^b - e^x))/(e^b + e^x)^2
        double eb   = std::exp(b);
        double ex   = std::exp(x);
        double ebx  = std::exp(b - x);          // e^(b - x)
        double ebxa = std::pow(ebx + 1.0, -a);
        double lbx  = std::log(ebx + 1.0);

        return eb * ebxa * (a * lbx * ( a * (-eb) + ex ) + 2.0 * a * eb - ex) / ((eb + ex) * (eb + ex));
    }
    else if(param_idx == 1){
        //partial derivative according to parameter 'b'
        //-(a e^b (e^(b - x) + 1)^(-a) (a^2 e^(2 b) - 3 a e^(b + x) - e^(b + x) + e^(2 x)))/(e^b + e^x)^3
        double eb   = std::exp(b);
        double ex   = std::exp(x);
        double ebxa = std::pow(std::exp(b - x) + 1.0, -a);
        // FIX: the e^(b + x) terms of the documented formula were previously
        // computed with e^(b - x) (eb / ex) — corrected to eb * ex.
        double ebpx = eb * ex;                  // e^(b + x)

        return - a * eb * ebxa * (a * a * eb * eb - 3.0 * a * ebpx - ebpx + ex * ex) / ((eb + ex) * (eb + ex) * (eb + ex));
    }

    return 0.0;
}
// d/dx of the activation function:
// (a e^b (1 + e^(b - x))^(-a) (a^2 e^(2 b) + e^(2 x) - e^(b + x) - 3 a e^(b + x)))/(e^b + e^x)^3
double NeuronLogistic_d2::activation_function_eval_derivative() {
    double a = this->activation_function_parameters[0];
    double b = this->activation_function_parameters[1];
    double x = this->potential;

    double eb   = std::exp(b);
    double ex   = std::exp(x);
    double ebxa = std::pow(std::exp(b - x) + 1.0, -a);
    // FIX: the e^(b + x) terms of the documented formula were previously
    // computed with e^(b - x) (eb / ex) — corrected to eb * ex.
    double ebpx = eb * ex;                      // e^(b + x)

    return a * eb * ebxa * (a * a * eb * eb - 3.0 * a * ebpx - ebpx + ex * ex) / ((eb + ex) * (eb + ex) * (eb + ex));
}
/**
* DESCRIPTION OF THE FILE
*
* @author Michal Kravčenko
* @date 22.7.18 -
*/
#ifndef INC_4NEURO_NEURONLOGISTIC_D2_H
#define INC_4NEURO_NEURONLOGISTIC_D2_H
#include <cmath>
#include "Neuron.h"
#include "../constants.h"
/**
 * Neuron whose activation function is the second derivative of the
 * NeuronLogistic activation (used when assembling networks that model
 * derivatives of other networks).
 */
class NeuronLogistic_d2:public Neuron, public IDifferentiable {
friend class boost::serialization::access;
protected:
template<class Archive>
void serialize(Archive & ar, const unsigned int version){
//TODO separate implementation to NeuronLogistic_d2.cpp!
ar & boost::serialization::base_object<Neuron>(*this);
};
public:
/**
 * Returns a new NeuronLogistic_d2 with the same (a, b) coefficients;
 * connections and state are not copied.
 */
Neuron* get_copy( );
/**
 * Constructs the object of the Logistic neuron with activation function
 * f(x) = [(1 + a) e^(b-x) (1 + e^(b - x))^(-1) - 1]a e^(b - x) (1 + e^(b - x))^(-1 - a)
 * @param[in] a First coefficient, stored in activation_function_parameters[0]
 * @param[in] b Second coefficient, stored in activation_function_parameters[1]
 */
explicit NeuronLogistic_d2(double a = 0.0, double b = 0.0);
/**
 * Evaluates '[(1 + a) e^(b-x) (1 + e^(b - x))^(-1) - 1]a e^(b - x) (1 + e^(b - x))^(-1 - a)'
 * and stores the result into the 'state' property
 */
void activate( ) override;
/**
 * Calculates the partial derivative of the activation function
 * f(x) = [(1 + a) e^(b-x) (1 + e^(b - x))^(-1) - 1]a e^(b - x) (1 + e^(b - x))^(-1 - a)
 * @param[in] param_idx Index of the parameter to calculate derivative of
 * @return Partial derivative of the activation function according to the
 * 'param_idx'-th parameter.
 */
double activation_function_eval_partial_derivative(int param_idx) override;
/**
 * Calculates d/dx of [(1 + a) e^(b-x) (1 + e^(b - x))^(-1) - 1]a e^(b - x) (1 + e^(b - x))^(-1 - a)
 * @return (a e^b (1 + e^(b - x))^(-a) (a^2 e^(2 b) + e^(2 x) - e^(b + x) - 3 a e^(b + x)))/(e^b + e^x)^3
 */
double activation_function_eval_derivative( ) override;
};
#endif //INC_4NEURO_NEURONLOGISTIC_D2_H
This diff is collapsed.
/**
* File containing methods for quick & simple formulation of PDEs as a system of Neural Networks with shared weights