Skip to content
Snippets Groups Projects
Commit 5e1c7c35 authored by Martin Mrovec's avatar Martin Mrovec
Browse files

ENH: Added new neuron types

parent e101d92b
No related branches found
No related tags found
No related merge requests found
set N_CORES=3
\ No newline at end of file
......@@ -83,10 +83,14 @@ IF("${BUILD_LIB}" STREQUAL "yes")
${LIB_TYPE}
Neuron/Neuron.cpp
Neuron/NeuronBiased.cpp
Neuron/NeuronBinary.cpp
Neuron/NeuronConstant.cpp
Neuron/NeuronLinear.cpp
Neuron/NeuronLogistic.cpp
Neuron/NeuronRectifier.cpp
Neuron/NeuronFilter.cpp
Neuron/NeuronBinaryBiased.cpp
Network/NeuralNetwork.cpp
Network/NeuralNetworkSum.cpp
NetConnection/ConnectionFunctionGeneral.cpp
......
......@@ -13,7 +13,7 @@
namespace lib4neuro {
class NeuronBiased : public NeuronDifferentiable {
private:
protected:
double bias;
......
#include <boost/serialization/export.hpp>
#include "NeuronSerialization.h"
#include "NeuronBinaryBiasedSerialization.h"
#include "NeuronConstant.h"
BOOST_CLASS_EXPORT_IMPLEMENT(lib4neuro::NeuronBinaryBiased)
namespace lib4neuro {

    /**
     * Constructs a binary neuron whose activation threshold is 'b'.
     */
    NeuronBinaryBiased::NeuronBinaryBiased(double b) {
        bias = b;
    }

    /**
     * Unit step: outputs 1 when the potential reaches the stored threshold,
     * 0 otherwise.
     * NOTE(review): the 'b' argument is unused; the stored this->bias acts as
     * the threshold — confirm this matches how the network passes biases.
     */
    double NeuronBinaryBiased::activate(double x,
                                        double b) {
        this->activation_val = (x >= this->bias) ? 1.0 : 0.0;
        return this->activation_val;
    }

    double NeuronBinaryBiased::activation_function_eval_derivative_bias(double x,
                                                                        double b) {
        // The step function is flat almost everywhere; f'(0) = 0 for the purposes of training
        return 0.0;
    }

    double NeuronBinaryBiased::activation_function_eval_derivative(double x,
                                                                   double b) {
        // The step function is flat almost everywhere; f'(0) = 0 for the purposes of training
        return 0.0;
    }

    Neuron* NeuronBinaryBiased::get_derivative() {
        // Derivative is zero almost everywhere, represented by a constant-zero neuron.
        return new NeuronConstant(0.0);
    }
}
\ No newline at end of file
/**
* DESCRIPTION OF THE CLASS
*
* @author Martin Beseda
* @author Martin Mrovec
* @author Michal Kravčenko
* @date 2017 - 2019
*/
#ifndef INC_4NEURO_NEURONBINARYBIASED_H
#define INC_4NEURO_NEURONBINARYBIASED_H
#include "NeuronBiased.h"
namespace lib4neuro {
/**
 * BinaryBiased neuron class - uses the unit step with an adjustable
 * threshold as the activation function
 */
class NeuronBinaryBiased : public NeuronBiased {
public:
/**
 * Struct used to access private properties from
 * the serialization function
 */
struct access;
/**
 * Default constructor for the BinaryBiased Neuron
 * @param[in] b Activation threshold: when the neuron potential
 * reaches 'b' the neuron becomes excited (outputs 1)
 */
LIB4NEURO_API explicit NeuronBinaryBiased(double b = 0.0);
/**
 * Performs the activation function and stores the result into the 'state' property
 */
LIB4NEURO_API double activate(double x,
double b) override;
/** Derivative w.r.t. the bias; defined as 0 for the purposes of training */
LIB4NEURO_API double activation_function_eval_derivative_bias(double x,
double b) override;
/** Derivative w.r.t. the input; defined as 0 for the purposes of training */
LIB4NEURO_API double activation_function_eval_derivative(double x,
double b) override;
/** Returns this neuron's derivative: a constant-zero neuron */
LIB4NEURO_API Neuron* get_derivative() override;
};
}
#endif //INC_4NEURO_NEURONBINARYBIASED_H
#ifndef LIB4NEURO_NEURON_BINARYBIASED_SERIALIZATION_H
#define LIB4NEURO_NEURON_BINARYBIASED_SERIALIZATION_H
#include <boost/serialization/base_object.hpp>
#include <boost/archive/text_oarchive.hpp>
#include <boost/archive/text_iarchive.hpp>
#include <boost/serialization/export.hpp>
#include "NeuronSerialization.h"
#include "NeuronBinaryBiased.h"
BOOST_CLASS_EXPORT_KEY(lib4neuro::NeuronBinaryBiased);
namespace lib4neuro {
    struct NeuronBinaryBiased::access {
        /**
         * Serializes a NeuronBinaryBiased: the Neuron base state plus the
         * 'bias' threshold. Previously only the Neuron base was serialized,
         * so the threshold was lost on save/load.
         */
        template<class Archive>
        static void serialize(Archive& ar,
                              lib4neuro::NeuronBinaryBiased& n,
                              const unsigned int version) {
            ar & boost::serialization::base_object<Neuron>(n);
            // Persist the threshold stored in the (protected) NeuronBiased base.
            ar & n.bias;
        }
    };
}
namespace boost {
    namespace serialization {

        /**
         * Serialization function
         * @tparam Archive Boost library template
         * @param ar Boost parameter - filled automatically during serialization!
         * @param n NeuronBinaryBiased instance
         * @param version Boost parameter - filled automatically during serialization!
         */
        template<class Archive>
        void serialize(Archive& ar,
                       lib4neuro::NeuronBinaryBiased& n,
                       const unsigned int version) {
            // Delegate to the nested 'access' struct, which can reach the
            // neuron's non-public state.
            lib4neuro::NeuronBinaryBiased::access::serialize(ar, n, version);
        }

    } // namespace serialization
} // namespace boost
#endif //LIB4NEURO_NEURON_BINARYBIASED_SERIALIZATION_H
......@@ -11,15 +11,17 @@
#include "NeuronBinary.h"
BOOST_CLASS_EXPORT_KEY(lib4neuro::NeuronBinary);
struct lib4neuro::NeuronBinary::access {
template<class Archive>
static void serialize(Archive& ar,
lib4neuro::NeuronBinary& n,
const unsigned int version) {
ar & boost::serialization::base_object<lib4neuro::Neuron>(n);
}
};
namespace lib4neuro {
/**
 * Serialization accessor for NeuronBinary: serializes only the Neuron
 * base-class state (no additional members are serialized here).
 */
struct NeuronBinary::access {
template<class Archive>
static void serialize(Archive& ar,
lib4neuro::NeuronBinary& n,
const unsigned int version) {
ar & boost::serialization::base_object<Neuron>(n);
}
};
}
namespace boost {
namespace serialization {
......
#include <boost/serialization/export.hpp>
#include "NeuronRectifier.h"
#include "NeuronBinary.h"
#include "Neuron.h"
#include "NeuronSerialization.h"
#include "NeuronFilterSerialization.h"
BOOST_CLASS_EXPORT_IMPLEMENT(lib4neuro::NeuronFilter);
namespace lib4neuro {

    NeuronFilter::NeuronFilter(double b) {
        bias = b;
    }

    /**
     * Filter activation: passes the input 'x' through unchanged when the
     * shifted potential x + bias is positive, and outputs 0 otherwise.
     * NOTE(review): the 'b' argument is unused; the stored this->bias is the
     * shift — confirm against how the network evaluates biased neurons.
     */
    double NeuronFilter::activate(double x,
                                  double b) {
        if (x + this->bias > 0.0) {
            this->activation_val = x;
        } else {
            this->activation_val = 0.0;
        }
        return this->activation_val;
    }

    double NeuronFilter::activation_function_eval_derivative_bias(double x,
                                                                  double b) {
        // f'(0) = 0 for the purposes of training
        return 0.0;
    }

    double NeuronFilter::activation_function_eval_derivative(double x,
                                                             double b) {
        // Unit step of the shifted potential; f'(0) = 0 for the purposes of training
        return (x + this->bias > 0) ? 1.0 : 0.0;
    }

    Neuron* NeuronFilter::get_derivative() {
        // NOTE(review): returns an unbiased binary neuron, so the stored bias
        // does not influence the derivative neuron — verify this is intended
        // (NeuronBinaryBiased exists in this commit and could carry the shift).
        return new NeuronBinary();
    }
}
\ No newline at end of file
#ifndef LIB4NEURO_NEURONFILTER_H
#define LIB4NEURO_NEURONFILTER_H

// Includes belong inside the guard so the header is fully self-contained
// and processed once per translation unit.
#include "Neuron.h"
#include "NeuronBiased.h"

namespace lib4neuro {

    /**
     * Filter neuron class - passes its input through unchanged when the
     * shifted potential is positive: f(x) = x if x + bias > 0, else 0
     * (see NeuronFilter::activate).
     */
    class NeuronFilter : public NeuronBiased {
    public:

        /**
         * Struct used to access private properties from
         * the serialization function
         */
        struct access;

        /**
         * Constructor
         * @param[in] b Bias added to the neuron potential before the
         *              positivity test (defaults to 0.0)
         */
        LIB4NEURO_API explicit NeuronFilter(double b = 0.0);

        /**
         * Performs the activation function and stores the result
         */
        LIB4NEURO_API double activate(double x,
                                      double b) override;

        /**
         * Derivative w.r.t. the bias; defined as 0 for the purposes of training
         */
        LIB4NEURO_API double activation_function_eval_derivative_bias(double x,
                                                                      double b) override;

        /**
         * Derivative w.r.t. the input: a unit step of the shifted potential
         */
        LIB4NEURO_API double activation_function_eval_derivative(double x,
                                                                 double b) override;

        /**
         * Returns a neuron representing this neuron's derivative
         */
        LIB4NEURO_API Neuron* get_derivative() override;
    };
}

#endif //LIB4NEURO_NEURONFILTER_H
#ifndef LIB4NEURO_NEURONFILTERSERIALIZATION_H
#define LIB4NEURO_NEURONFILTERSERIALIZATION_H
#include <boost/serialization/base_object.hpp>
#include <boost/archive/text_oarchive.hpp>
#include <boost/archive/text_iarchive.hpp>
#include <boost/serialization/export.hpp>
#include "NeuronFilter.h"
#include "NeuronSerialization.h"
BOOST_CLASS_EXPORT_KEY(lib4neuro::NeuronFilter);
namespace lib4neuro {
    struct NeuronFilter::access {
        /**
         * Serializes a NeuronFilter: the Neuron base state plus the 'bias'
         * shift. Previously only the Neuron base was serialized, so the bias
         * was lost on save/load.
         */
        template<class Archive>
        static void serialize(Archive& ar,
                              NeuronFilter& n,
                              const unsigned int version) {
            ar & boost::serialization::base_object<Neuron>(n);
            // Persist the bias stored in the (protected) NeuronBiased base.
            ar & n.bias;
        }
    };
}
namespace boost {
    namespace serialization {

        /**
         * Serialization function
         * @tparam Archive Boost library template
         * @param ar Boost parameter - filled automatically during serialization!
         * @param n NeuronFilter instance
         * @param version Boost parameter - filled automatically during serialization!
         */
        template<class Archive>
        void serialize(Archive& ar,
                       lib4neuro::NeuronFilter& n,
                       const unsigned int version) {
            // Delegate to the nested 'access' struct, which can reach the
            // neuron's non-public state.
            lib4neuro::NeuronFilter::access::serialize(ar, n, version);
        }

    } // namespace serialization
} // namespace boost
#endif //LIB4NEURO_NEURONFILTERSERIALIZATION_H
......@@ -2,6 +2,8 @@
#include <boost/serialization/export.hpp>
#include "NeuronRectifier.h"
#include "NeuronBinary.h"
#include "NeuronSerialization.h"
#include "NeuronRectifierSerialization.h"
BOOST_CLASS_EXPORT_IMPLEMENT(lib4neuro::NeuronRectifier);
......@@ -11,24 +13,25 @@ namespace lib4neuro {
double NeuronRectifier::activate(double x,
double b) {
this->activation_val = std::max(0,
x + b);
this->activation_val = (0 < x + b) ? x + b : 0.0;
return this->activation_val;
}
double NeuronRectifier::activation_function_eval_derivative(double x,
double b) {
double NeuronRectifier::activation_function_eval_derivative_bias(double x,
double b) {
// f'(0) = 0 for the purposes of training
return ((x + b) > 0) ? 1 : 0;
return ((x + b) > 0) ? 1.0 : 0.0;
}
double NeuronRectifier::activation_function_eval_derivative(double x,
double b) {
// f'(0) = 0 for the purposes of training
return ((x + b) > 0) ? 1 : 0;
return ((x + b) > 0) ? 1.0 : 0.0;
}
Neuron* NeuronRectifier::get_derivative() {
NeuronConstant* output = new NeuronConstant();
NeuronBinary* output = new NeuronBinary();
return output;
}
......
#include "Neuron.h"
#ifndef LIB4NEURO_NEURONRECTIFIER_H
#define LIB4NEURO_NEURONRECTIFIER_H
#ifndef INC_4NEURO_NEURONRECTIFIER_H
#define INC_4NEURO_NEURONRECTIFIER_H
namespace lib4neuro {
......@@ -10,7 +10,7 @@ namespace lib4neuro {
* Rectifier linear unit neuron class - uses activation function in the form f(x) = max(0, x),
* 'x' being the neuron's potential
*/
class NeuronRectifier : NeuronDifferentiable {
class NeuronRectifier : public NeuronDifferentiable {
public:
struct access;
......@@ -29,4 +29,4 @@ namespace lib4neuro {
};
}
#endif //LIB4NEURO_NEURONRECTIFIER_H
#endif //INC_4NEURO_NEURONRECTIFIER_H
......@@ -16,7 +16,7 @@ namespace lib4neuro {
struct NeuronRectifier::access {
template<class Archive>
static void serialize(Archive& ar,
Neuronrectifier& n,
NeuronRectifier& n,
const unsigned int version) {
ar & boost::serialization::base_object<Neuron>(n);
}
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment