@@ -6,3 +6,7 @@
compilers.env
*TestRunner*
*_fun.f90
src/funit.tmp
*.swp
/Release/
.idea/*
before_script:
  - apt-get update
  - apt-get install -y -qq make gfortran cmake gem
  - gem install funit
  - ./build.sh

job:
  tags:
    - centos7

run_tests:
  script: "./run_tests.sh"

image: martinbeseda/dockertest:latest

before_script:
  - git clone https://github.com/ArashPartow/exprtk.git
  - cp exprtk/exprtk.hpp /usr/include
  - export TERM=xterm
  - ./build-ci.sh

script: "./run_tests.sh"
Summary
(Summarize the bug encountered concisely)
Steps to reproduce
(How one can reproduce the issue - this is very important)
What is the current bug behavior?
(What actually happens)
What is the expected correct behavior?
(What you should see instead)
Relevant logs and/or screenshots
(Paste any relevant logs - please use code blocks (```) to format console output,
logs, and code as it's very hard to read otherwise.)
Possible fixes
(If you can, link to the line of code that might be responsible for the problem)
/label ~bug ~needs-investigation
Summary
(Summarize the bug encountered concisely)
Associated classes
(Classes that need to be modified, if any)
/label ~bug ~reproduced ~needs-investigation
cmake_minimum_required(VERSION 3.0)
message ("Before enable language")
enable_language(Fortran)
if (WIN32)
message ("cmake for " ${CMAKE_Fortran_COMPILER})
set (CMAKE_FORTRAN_COMPILER ${CMAKE_Fortran_COMPILER})
project(4Neuro Fortran)
else ()
project(4Neuro)
endif ()
message ("Start cmakeList")
#-------------------------------#
# Default installation location #
#-------------------------------#
# Linux: /usr/local
# Windows: c:/Program Files
#------------#
# Build type #
#------------#
# Default: Release
# Others: None, Debug
if (NOT CMAKE_BUILD_TYPE)
set (CMAKE_BUILD_TYPE RELEASE CACHE STRING
"Choose the type of build, options are: None Debug Release."
FORCE)
endif (NOT CMAKE_BUILD_TYPE)
#--------------------------------#
# Setting Fortran compiler flags #
#--------------------------------#
# TODO check how fast the code runs with -funroll-loops,
# with -funroll-all-loops, and without the switch
set(standard "-std=f2008")
#--------------------#
# Automatic settings #
#--------------------#
get_filename_component (Fortran_COMPILER_NAME ${CMAKE_Fortran_COMPILER} NAME)
# Processing user variables
if (WITH_TIME_PROFILING)
add_definitions(-DTIME_PROFILING)
endif()
# C & CXX compilers
if (WIN32)
set (CMAKE_C_COMPILER gcc)
set (CMAKE_CXX_COMPILER g++)
endif()
# Write compiler variables to the file - to pass them to test script
file(WRITE compilers.env "export FC=${CMAKE_Fortran_COMPILER}\n")
file(APPEND compilers.env "export CXX=${CMAKE_CXX_COMPILER}\n")
file(APPEND compilers.env "export CC=${CMAKE_C_COMPILER}\n")
# Fortran compiler detection
message ("Fortran compiler name: " ${Fortran_COMPILER_NAME})
message ("CMAKE_Fortran_COMPILER full path: " ${CMAKE_Fortran_COMPILER})
message ("Fortran compiler: " ${Fortran_COMPILER_NAME})
message ("Build type:" ${CMAKE_BUILD_TYPE})
if (Fortran_COMPILER_NAME MATCHES "gfortran.*")
# gfortran
set (CMAKE_Fortran_FLAGS_RELEASE "-funroll-loops -fno-f2c -O3 ${standard} -cpp")
set (CMAKE_Fortran_FLAGS_DEBUG "-fno-f2c -O0 -g ${standard} -cpp -fimplicit-none -Wall -pedantic -fcheck=all -fbacktrace")
elseif (Fortran_COMPILER_NAME MATCHES "f95")
set (CMAKE_Fortran_FLAGS_RELEASE "-funroll-loops -fno-f2c -O3 ${standard} -cpp")
set (CMAKE_Fortran_FLAGS_DEBUG "-fno-f2c -O0 -g ${standard} -cpp -fimplicit-none -Wall -pedantic -fcheck=all -fbacktrace")
elseif (Fortran_COMPILER_NAME MATCHES "ifort.*")
# ifort (untested)
set (CMAKE_Fortran_FLAGS_RELEASE "-f77rtl -O3 ${standard} -cpp")
set (CMAKE_Fortran_FLAGS_DEBUG "-f77rtl -O0 -g ${standard} -cpp -fimplicit-none -Wall -pedantic -fcheck=all -fbacktrace")
elseif (Fortran_COMPILER_NAME MATCHES "g77")
# g77
set (CMAKE_Fortran_FLAGS_RELEASE "-funroll-loops -fno-f2c -O3 -m32 ${standard} -cpp")
set (CMAKE_Fortran_FLAGS_DEBUG "-fno-f2c -O0 -g -m32 ${standard} -cpp -fimplicit-none -Wall -pedantic -fcheck=all -fbacktrace")
else (Fortran_COMPILER_NAME MATCHES "gfortran.*")
message ("No optimized Fortran compiler flags are known, we just try -O2 and we DO NOT support directives preprocessing (i.e. all conditional blocks will be compiled)...")
set (CMAKE_Fortran_FLAGS_RELEASE "-O2 ${standard}")
set (CMAKE_Fortran_FLAGS_DEBUG "-O0 -g ${standard} -Wall -pedantic -fcheck=all")
endif (Fortran_COMPILER_NAME MATCHES "gfortran.*")
#----------------#
# User variables #
#----------------#
set(SRC_DIR src)
set(BUILD_DIR build)
set(LIB_DIR lib)
#--------------------#
# Building libraries #
#--------------------#
#link_directories("${BUILD_DIR}/${LIB_DIR}")
include_directories("${BUILD_DIR}/${LIB_DIR}")
add_subdirectory("${SRC_DIR}" "${LIB_DIR}")
message ("Current directory:" ${PWD})
message ("SRC_DIR: " ${SRC_DIR})
message ("BUILD_DIR:" ${BUILD_DIR})
message ("LIB_DIR: " ${LIB_DIR})
if (WIN32)
add_executable(connection_m_mem_leak_test ${SRC_DIR}/connection_m_mem_leak_test.f90)
target_link_libraries(connection_m_mem_leak_test connection_m)
message ("Windows")
else ()
add_executable(connection_m_mem_leak_test.out ${SRC_DIR}/connection_m_mem_leak_test.f90)
target_link_libraries(connection_m_mem_leak_test.out connection_m)
message ("Not Windows")
endif ()
cmake_minimum_required(VERSION 3.0)
project(lib4neuro)
#cmake_policy(SET CMP0074 NEW)
#cmake_policy(SET CMP0060 NEW)
#if(WIN32)
# message("Setting no library prefixes...")
# set(CMAKE_STATIC_LIBRARY_PREFIX "")
# set(CMAKE_SHARED_LIBRARY_PREFIX "")
# set(CMAKE_SHARED_MODULE_PREFIX "")
# set(CMAKE_IMPORT_LIBRARY_PREFIX "")
# set(PREFIX "")
# set(IMPORT_PREFIX "")
#endif()
#-------------------------------#
# Default installation location #
#-------------------------------#
# Linux: /usr/local
# Windows: c:/Program Files
#------------#
# Build type #
#------------#
# Default: Release
# Others: None, Debug
if (NOT CMAKE_BUILD_TYPE)
set (CMAKE_BUILD_TYPE RELEASE CACHE STRING
"Choose the type of build, options are: None Debug Release."
FORCE)
endif (NOT CMAKE_BUILD_TYPE)
#--------------------------------#
# Setting C++ compiler flags #
#--------------------------------#
set(CMAKE_CXX_STANDARD 17)
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wall -mtune=native" )
if("${CMAKE_CXX_COMPILER_ID}" STREQUAL "Intel")
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -xHost" )
endif()
#--------------------#
# Automatic settings #
#--------------------#
# Compare case-insensitively, since the build type is usually passed as "Debug"
string(TOUPPER "${CMAKE_BUILD_TYPE}" BUILD_TYPE_UPPER)
if(BUILD_TYPE_UPPER MATCHES DEBUG)
    set(CMAKE_VERBOSE_MAKEFILE ON)
endif()
#-------------------------#
# Find external libraries #
#-------------------------#
message("Looking for external libraries...")
set(CMAKE_MODULE_PATH ${CMAKE_CURRENT_SOURCE_DIR})
set(Boost_USE_MULTITHREADED ON)
find_package(Boost REQUIRED COMPONENTS serialization random)
if(Boost_FOUND)
message("Boost INCLUDE DIR: ${Boost_INCLUDEDIR}")
message("Boost LIB DIR: ${Boost_LIBRARY_DIRS}")
include_directories(${Boost_INCLUDE_DIR} ${Boost_INCLUDEDIR} ${Boost_INCLUDE_DIRS})
link_directories(${Boost_LIBRARY_DIRS})
endif()
find_package(exprtk)
#------------------------------------------#
# Detect maximum available number of cores #
# and set corresponding build options #
#------------------------------------------#
message("Detecting available cores count...")
include(ProcessorCount)
ProcessorCount(n_cores)
if(n_cores GREATER 1)
    # Leave one core free for the rest of the system
    math(EXPR n_cores "${n_cores}-1")
    message("Build will be performed on ${n_cores} cores.")
    set(CTEST_BUILD_FLAGS -j${n_cores})
    set(ENV{N_CORES} ${n_cores})
    set(ctest_test_args ${ctest_test_args} PARALLEL_LEVEL ${n_cores})
endif()
#---------------#
# Set variables #
#---------------#
set(SRC_DIR src)
set(PROJECT_BINARY_DIR build)
set(CMAKE_LIBRARY_OUTPUT_DIRECTORY lib)
set(CMAKE_ARCHIVE_OUTPUT_DIRECTORY lib)
include_directories(include ${SRC_DIR})
add_subdirectory(${SRC_DIR} ${PROJECT_BINARY_DIR})
@@ -32,7 +32,7 @@ DOXYFILE_ENCODING = UTF-8
 # title of most generated pages and in a few other places.
 # The default value is: My Project.

-PROJECT_NAME = "4Neuro"
+PROJECT_NAME = "lib4neuro"

 # The PROJECT_NUMBER tag can be used to enter a project or revision number. This
 # could be handy for archiving the generated documentation or if some version
@@ -44,14 +44,14 @@ PROJECT_NUMBER =
 # for a project that appears at the top of each page and should give viewer a
 # quick idea about the purpose of the project. Keep the description short.

-PROJECT_BRIEF = "Fortran neural networks library"
+PROJECT_BRIEF = "Massively-parallel neural networks library"

 # With the PROJECT_LOGO tag one can specify a logo or an icon that is included
 # in the documentation. The maximum height of the logo should not exceed 55
 # pixels and the maximum width should not exceed 200 pixels. Doxygen will copy
 # the logo to the output directory.

-PROJECT_LOGO = "img/4neuro_logo_small.png"
+PROJECT_LOGO = "img/lib4neuro_logo.png"

 # The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) path
 # into which the generated documentation will be written. If a relative path is
@@ -266,7 +266,7 @@ OPTIMIZE_OUTPUT_JAVA = NO
 # sources. Doxygen will then generate output that is tailored for Fortran.
 # The default value is: NO.

-OPTIMIZE_FOR_FORTRAN = YES
+OPTIMIZE_FOR_FORTRAN = NO

 # Set the OPTIMIZE_OUTPUT_VHDL tag to YES if your project consists of VHDL
 # sources. Doxygen will then generate output that is tailored for VHDL.
@@ -291,7 +291,7 @@ OPTIMIZE_OUTPUT_VHDL = NO
 # Note that for custom extensions you also need to set FILE_PATTERNS otherwise
 # the files are not read by doxygen.

-EXTENSION_MAPPING = f03=FortranFree f08=FortranFree
+EXTENSION_MAPPING =

 # If the MARKDOWN_SUPPORT tag is enabled then doxygen pre-processes all comments
 # according to the Markdown format, which allows for more readable
@@ -441,7 +441,7 @@ EXTRACT_ALL = NO
 # be included in the documentation.
 # The default value is: NO.

-EXTRACT_PRIVATE = NO
+EXTRACT_PRIVATE = YES

 # If the EXTRACT_PACKAGE tag is set to YES, all members with package or internal
 # scope will be included in the documentation.
@@ -453,7 +453,7 @@ EXTRACT_PACKAGE = NO
 # included in the documentation.
 # The default value is: NO.

-EXTRACT_STATIC = NO
+EXTRACT_STATIC = YES

 # If the EXTRACT_LOCAL_CLASSES tag is set to YES, classes (and structs) defined
 # locally in source files will be included in the documentation. If set to NO,
@@ -864,7 +864,7 @@ FILE_PATTERNS = *.c \
 # be searched for input files as well.
 # The default value is: NO.

-RECURSIVE = NO
+RECURSIVE = YES

 # The EXCLUDE tag can be used to specify files and/or directories that should be
 # excluded from the INPUT source files. This way you can easily exclude a
@@ -2375,7 +2375,7 @@ DOT_IMAGE_FORMAT = png
 # The default value is: NO.
 # This tag requires that the tag HAVE_DOT is set to YES.

-INTERACTIVE_SVG = NO
+INTERACTIVE_SVG = YES

 # The DOT_PATH tag can be used to specify the path where the dot tool can be
 # found. If left blank, it is assumed the dot tool can be found in the path.
################################################################################
#
# CMake script for finding ExprTk.
# The default CMake search process is used to locate files.
#
# This script creates the following variables:
# EXPRTK_FOUND: Boolean that indicates if the package was found
# EXPRTK_INCLUDE_DIRS: Paths to the necessary header files
#
################################################################################
# Find headers and libraries
FIND_PATH(
EXPRTK_INCLUDE_DIR
NAMES
exprtk.hpp
HINTS
$ENV{EXPRTK_INCLUDE_DIR}
$ENV{EXPRTK_ROOT}
${EXPRTK_ROOT}
PATHS
/home
/usr/local
/usr
/opt/local
PATH_SUFFIXES
include
)
# Set EXPRTK_FOUND honoring the QUIET and REQUIRED arguments
INCLUDE(FindPackageHandleStandardArgs)
FIND_PACKAGE_HANDLE_STANDARD_ARGS(
ExprTk
"ExprTk was NOT found!"
EXPRTK_INCLUDE_DIR)
# Output variables
IF(EXPRTK_FOUND)
# Include dirs
SET(EXPRTK_INCLUDE_DIRS ${EXPRTK_INCLUDE_DIR})
ELSE()
MESSAGE(FATAL_ERROR "Please set the environment variable EXPRTK_INCLUDE_DIR to the folder where 'exprtk.hpp' is located.")
ENDIF(EXPRTK_FOUND)
# Advanced options for not cluttering the cmake UIs:
MARK_AS_ADVANCED(EXPRTK_INCLUDE_DIR)
include_directories(${EXPRTK_INCLUDE_DIRS})
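As a usage sketch, the module above honors the EXPRTK_INCLUDE_DIR environment variable as a search hint, so a build against a locally cloned exprtk can look like this (paths illustrative):

```
git clone https://github.com/ArashPartow/exprtk.git
export EXPRTK_INCLUDE_DIR="$PWD/exprtk"
cmake .   # find_package(exprtk) now locates exprtk.hpp via the hint
```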
4Neuro v.0.1 (TODO: add ASCII-art logo)

## Dependencies

- [Git](https://git-scm.com/)
- [cmake](https://cmake.org/) (version >= 3.0)
- `make`
- C++ compiler
- [Boost](https://www.boost.org/)
- [exprtk](http://www.partow.net/programming/exprtk/)

#### Windows

- [Visual Studio](https://visualstudio.microsoft.com/) (install together with `cl` and `SDK`!)

## Compilation

#### Linux (Debian-based)

- Clone the repository to your PC
```
git clone git@code.it4i.cz:moldyn/lib4neuro.git
```
- Install the package `libboost-all-dev`
```
sudo apt-get install libboost-all-dev
```
- Clone the repository of `exprtk`
```
git clone https://github.com/ArashPartow/exprtk.git
```
- Set the `EXPRTK_INCLUDE_DIR` environment variable to the path of your `exprtk` folder
```
export EXPRTK_INCLUDE_DIR="path to the exprtk folder"
```
- Go to the `lib4neuro` folder
- In the file `build.sh` set correctly the variables `BUILD_TYPE` and `CXX_COMPILER`
- Run
```
./build.sh
```

#### Windows

- Download `Boost` from [this link](https://www.boost.org/users/download/) and extract the archive
- Run the Visual Studio Command Prompt (Start Menu -> Visual Studio *** -> Developer Command Prompt)
and go to the folder with `Boost`
```
cd "folder with Boost"
bootstrap.bat
b2.exe --build-type=complete
```
- Set the `BOOST_ROOT` environment variable to the path of your folder containing `Boost`
  1. In Search, search for and then select: System (Control Panel)
  2. Click the Advanced system settings link.
  3. Click Environment Variables.
  4. In the New System Variable window, specify the value of the `BOOST_ROOT` environment variable
- Clone the repository of `exprtk`
```
git clone https://github.com/ArashPartow/exprtk.git
```
- Set the `EXPRTK_INCLUDE_DIR` environment variable to the path of your `exprtk` folder
- Go to the `lib4neuro` folder
- In the file `build.bat` set correctly the variables `BUILD_TYPE`, `CXX_COMPILER` and `C_COMPILER`
- Run
```
build.bat
```

## Removing installation

#### Windows

1. Go to the repository folder
2. Delete the 'build' folder

#### Linux

1. Go to the repository folder
2. Run `./clean.sh`
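For reference, the Linux installation steps above condense into the following session (a sketch; the package name assumes a Debian-based distribution and paths are illustrative):

```
sudo apt-get install libboost-all-dev
git clone https://github.com/ArashPartow/exprtk.git
export EXPRTK_INCLUDE_DIR="$PWD/exprtk"
git clone git@code.it4i.cz:moldyn/lib4neuro.git
cd lib4neuro
# edit BUILD_TYPE and CXX_COMPILER in build.sh first
./build.sh
```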
#!/bin/sh
#------------#------------------------------------------------------------
# Parameters #
#------------#
# Build type (Release/Debug)
BUILD_TYPE=Debug
# C++ compiler
CXX_COMPILER="g++-8"
if [ -z "$BUILD_TYPE" ] || [ -z "$CXX_COMPILER" ]; then
(>&2 echo "Please set both the BUILD_TYPE and CXX_COMPILER variables in the 'build.sh' script.")
exit 2
fi
$(pwd)/clean.sh
cmake -DCMAKE_VERBOSE_MAKEFILE:BOOL=ON -DCMAKE_BUILD_TYPE=${BUILD_TYPE} -DCMAKE_CXX_COMPILER=${CXX_COMPILER} .
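# NOTE: N_CORES is assumed to be provided by the environment; if it is
# unset, "-j" below lets make spawn an unlimited number of parallel jobs.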
cmake --build . --config ${BUILD_TYPE} -- -j${N_CORES} && (tput setaf 2; echo "Build complete."; echo "For examples have a look at the folder build/bin/examples."; tput sgr 0; ) || (tput setaf 1; echo "Build finished with errors!"; tput sgr 0; exit 1;)
@echo off
title Build lib4neuro project
rem Build type (Release/Debug)
set BUILD_TYPE=Debug
rem C++ compiler
set CXX_COMPILER=cl
set C_COMPILER=cl
rem Makefile generator
rem For the complete list type "cmake --help"
rem Example: "MSYS Makefiles", "MinGW Makefiles", "NMake Makefiles"
set MAKEFILE_GENERATOR="Visual Studio 15 2017"
call clean.bat
cmake -G %MAKEFILE_GENERATOR% -DCMAKE_VERBOSE_MAKEFILE:BOOL=ON -DCMAKE_BUILD_TYPE=%BUILD_TYPE% -DCMAKE_CXX_COMPILER=%CXX_COMPILER% -DCMAKE_C_COMPILER=%C_COMPILER% -DBOOST_ROOT=%BOOST_ROOT% -DBOOST_LIBRARYDIR=%BOOST_LIBRARYDIR% -DBOOST_INCLUDEDIR=%BOOST_INCLUDEDIR% -DBoost_DEBUG:BOOL=ON .
cmake --build . --config %BUILD_TYPE% && (echo Build complete. & echo For examples have a look at the folder build/bin/examples.) || (echo Build finished with errors!)
#!/bin/bash
#------------#------------------------------------------------------------
# Parameters #
#------------#
# Fortran compiler
# FORTRAN_COMPILER=/mingw32/bin/gfortran
# FORTRAN_COMPILER=gfortran
# Build type (Release/Debug)
BUILD_TYPE=
# Turn on/off time profiling (1/0)
WITH_TIME_PROFILING=0
# C++ compiler
CXX_COMPILER=
#--------------#----------------------------------------------------------
# Library path #
#--------------#
if [ -z "$BUILD_TYPE" ] || [ -z "$CXX_COMPILER" ]; then
(>&2 echo "Please set both the BUILD_TYPE and CXX_COMPILER variables in the 'build.sh' script.")
exit 2
fi
MAKEFILE_TYPE=""
LIB_PATH="$PWD/build/lib"
#LIB_PATH_WIN="$CD\\build\\lib"
echo ${LIB_PATH}
#echo ${LIB_PATH_WIN}
case `uname -s` in
Darwin)
echo 'OS X not supported! Buy a normal computer, please.'
exit -1
;;
Linux)
MAKEFILE_TYPE="Unix Makefiles"
if [[ ${LD_LIBRARY_PATH} != *${LIB_PATH}* ]]; then
export LD_LIBRARY_PATH=${LD_LIBRARY_PATH}:${LIB_PATH}
fi
;;
CYGWIN*)
MAKEFILE_TYPE="Unix Makefiles"
# if [[ ${PATH} != *${LIB_PATH_WIN}* ]]; then
# export PATH=${PATH};${LIB_PATH_WIN}
# fi
;;
MINGW*)
MAKEFILE_TYPE="MinGW Makefiles"
# if [[ ${PATH} != *${LIB_PATH_WIN}* ]]; then
# export PATH=${PATH};${LIB_PATH_WIN}
# fi
;;
MSYS*)
MAKEFILE_TYPE="MSYS Makefiles"
echo ${MAKEFILE_TYPE}
# if [[ ${PATH} != *${LIB_PATH_WIN}* ]]; then
# export PATH=${PATH};${LIB_PATH_WIN}
# fi
;;
*)
echo "Your makefile generator `uname -s` is not yet supported. Please, write an issue to https://code.it4i.cz/moldyn/4Neuro"
exit -1
;;
esac
#-------------------------------------------------------------------------
echo "Creating folder 'build'...";
mkdir -p build/lib;
echo "Folder 'build' was created'";
cd build;
#cmake -G "MSYS Makefiles" -DCMAKE_Fortran_COMPILER=${FORTRAN_COMPILER} -DCMAKE_BUILD_TYPE=${BUILD_TYPE} -DWITH_TIME_PROFILING:BOOLEAN=${WITH_TIME_PROFILING} ..
cmake -G "${MAKEFILE_TYPE}" -DCMAKE_BUILD_TYPE=${BUILD_TYPE} -DWITH_TIME_PROFILING:BOOLEAN=${WITH_TIME_PROFILING} ..
make VERBOSE=1 && echo "Build complete." || exit -1;
#make install;
$(pwd)/clean.sh
cmake -DCMAKE_VERBOSE_MAKEFILE:BOOL=ON -DCMAKE_BUILD_TYPE=${BUILD_TYPE} -DCMAKE_CXX_COMPILER=${CXX_COMPILER} .
cmake --build . --config ${BUILD_TYPE} -- -j${N_CORES} && (tput setaf 2; echo "Build complete."; echo "For examples have a look at the folder build/bin/examples."; tput sgr 0; ) || (tput setaf 1; echo "Build finished with errors!"; tput sgr 0; exit 1;)
#!/bin/bash
echo "Generating documentation into folder 'docs'...";
doxygen >/dev/null 2>&1 && echo "Documentation was generated." || exit -1;
cd docs/latex;
echo "Building LaTeX documentation../";
make >/dev/null && echo "Documentation was built." || exit -1;
@echo off
rmdir /s /q build 2>NUL
del Makefile 2>NUL
del /s /q "docs/*" 2>NUL
del "src/*TestRunner*" 2>NUL
del "src/*.o src/*.mod" 2>NUL
del src/funit.tmp 2>NUL
del "src/*_fun.f90" 2>NUL
del CMakeCache.txt 2>NUL
del cmake_install.cmake 2>NUL
del src/cmake_install.cmake 2>NUL
del /s /q CMakeFiles 2>NUL
del "*.vcxproj" 2>NUL
del "*.vcxproj.filters" 2>NUL
del "*.sln" 2>NUL
rmdir /s /q CMakeFiles 2>NUL
rmdir /s /q src/CMakeFiles 2>NUL
rmdir /s /q src/examples/CMakeFiles 2>NUL
rmdir /s /q src/tests/CMakeFiles 2>NUL
#!/bin/bash
rm -rf build
rm -rf Makefile
rm -rf docs/*
rm -f src/*TestRunner*
rm -f src/*.o src/*.mod
rm -f src/funit.tmp src/*_fun.f90
rm -f CMakeCache.txt
rm -f cmake_install.cmake src/cmake_install.cmake
rm -rf CMakeFiles src/CMakeFiles src/examples/CMakeFiles src/tests/CMakeFiles
//
// Created by martin on 7/16/18.
//
#ifndef INC_4NEURO_4NEURO_H
#define INC_4NEURO_4NEURO_H
//TODO make only public interface visible
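// Umbrella header: including "4neuro.h" pulls in the whole public API listed below.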
#include "../src/DataSet/DataSet.h"
#include "../src/ErrorFunction/ErrorFunctions.h"
#include "../src/LearningMethods/ParticleSwarm.h"
#include "../src/NetConnection/ConnectionFunctionGeneral.h"
#include "../src/NetConnection/ConnectionFunctionIdentity.h"
#include "../src/Network/NeuralNetwork.h"
#include "../src/Network/NeuralNetworkSum.h"
#include "../src/Neuron/Neuron.h"
#include "../src/Neuron/NeuronBinary.h"
#include "../src/Neuron/NeuronLinear.h"
#include "../src/Neuron/NeuronLogistic.h"
#endif //INC_4NEURO_4NEURO_H
package cz.vsb.mro0010.neuralnetworks;
public class BinaryNeuron extends Neuron {
@Override
public void transfer() {
if (this.getPotential() > this.getThreshold()) {
this.setState(1);
} else {
this.setState(0);
}
}
}
package cz.vsb.mro0010.neuralnetworks;
public class Connection {
private Neuron inputNeuron;
private Neuron outputNeuron;
private float weight;
public Connection(Neuron inputNeuron, Neuron outputNeuron, float weight) {
this.setInputNeuron(inputNeuron);
this.setOutputNeuron(outputNeuron);
this.setWeight(weight);
}
protected Neuron getInputNeuron() {
return inputNeuron;
}
protected void setInputNeuron(Neuron inputNeuron) {
this.inputNeuron = inputNeuron;
}
protected Neuron getOutputNeuron() {
return outputNeuron;
}
protected void setOutputNeuron(Neuron outputNeuron) {
this.outputNeuron = outputNeuron;
}
public float getWeight() {
return weight;
}
public void setWeight(float weight) {
this.weight = weight;
}
public void adjustWeight(float value) {
this.weight += value;
}
public void passSignal() {
outputNeuron.adjustPotential(inputNeuron.getState()*this.getWeight());
}
@Override
public String toString() {
return "Weight: " + this.getWeight();
}
}
package cz.vsb.mro0010.neuralnetworks;
public class InputLayerPseudoNeuron extends Neuron {
public InputLayerPseudoNeuron() {
super();
}
@Override
public void transfer() {
this.setState(this.getPotential());
}
}
package cz.vsb.mro0010.neuralnetworks;
import java.util.ArrayList;
public abstract class Interconnections {
protected ArrayList<Connection> connections;
public ArrayList<Connection> getConnections() {
return connections;
}
public Interconnections() {
this.connections = new ArrayList<Connection>();
}
public void addConnection(Connection connection) {
this.connections.add(connection);
}
// public void passSignal() {
// for (Connection c : this.connections) {
//
// Neuron n = c.getOutputNeuron();
// n.initialize();
// for (Connection cn : this.connections) {
// if (cn.getOutputNeuron().equals(n)) {
// cn.passSignal();
// }
// }
// n.transfer();
// }
// }
public void passSignal() { // Faster version
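// Each output neuron is initialized and transferred exactly once: all of
// its incoming connections are accumulated in the inner loop, so neurons
// targeted by several connections are not recomputed as in the
// commented-out variant above.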
ArrayList<Neuron> processedNeurons = new ArrayList<Neuron>();
for (Connection c : this.connections) {
Neuron n = c.getOutputNeuron();
if (!processedNeurons.contains(n)) {
processedNeurons.add(n);
n.initialize();
for (Connection cn : this.connections) {
if (cn.getOutputNeuron().equals(n)) {
cn.passSignal();
}
}
n.transfer();
}
}
}
public abstract void adjustWeights();
}
package cz.vsb.mro0010.neuralnetworks;
public class InterconnectionsBP extends InterconnectionsMultiLayer {
public InterconnectionsBP(float learningRate) {
super(learningRate);
}
public void setLearningRate(float learningRate) {
this.learningRate = learningRate;
}
@Override
public void adjustWeights() { // backPropagation - set new weights !after! all deltas are calculated
for (Connection connection : this.connections) {
float delta = ((SigmoidalNeuron)connection.getOutputNeuron()).getError();
float lambda = ((SigmoidalNeuron)connection.getOutputNeuron()).getSlope();
float y = connection.getOutputNeuron().getState();
float x = connection.getInputNeuron().getState();
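// Gradient for a sigmoidal unit: dE/dw = delta * lambda * y * (1 - y) * x,
// where lambda * y * (1 - y) is the derivative of the logistic activation.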
float errorDerivative = delta*lambda*y*(1-y)*x;
connection.adjustWeight(-learningRate*errorDerivative);
}
}
}
package cz.vsb.mro0010.neuralnetworks;
public abstract class InterconnectionsMultiLayer extends Interconnections {
protected float learningRate; //eta
public InterconnectionsMultiLayer(float learningRate) {
this.learningRate = learningRate;
}
}
package cz.vsb.mro0010.neuralnetworks;
public class InvalidInputNumberException extends RuntimeException {
/**
*
*/
private static final long serialVersionUID = -6282750644609100469L;
public InvalidInputNumberException() {
super("Number of input values does not correspond with network input size");
}
}
package cz.vsb.mro0010.neuralnetworks;
public class InvalidLayerNumberException extends RuntimeException {
/**
*
*/
private static final long serialVersionUID = 1366940285989358521L;
public InvalidLayerNumberException() {
super("Number of layer does not correspond with network");
}
}
package cz.vsb.mro0010.neuralnetworks;
public class InvalidNeuronTypeException extends RuntimeException {
/**
*
*/
private static final long serialVersionUID = 5354372081840990196L;
public InvalidNeuronTypeException() {
super("Wrong Neuron type");
}
}
package cz.vsb.mro0010.neuralnetworks;
import java.util.ArrayList;
public abstract class MultiLayeredNet extends NeuralNet {
protected ArrayList<ArrayList<Neuron>> neuronLayers;
protected ArrayList<InputLayerPseudoNeuron> inputNeuronLayer;
protected int nrOfInputs;
protected int nrOfLayers;
protected ArrayList<Integer> nrOfNeuronsPerLayer;
public MultiLayeredNet(int nrOfInputs, int nrOfLayers, ArrayList<Integer> nrOfNeuronsPerLayer) {
super();
this.nrOfInputs = nrOfInputs;
this.nrOfLayers = nrOfLayers;
this.nrOfNeuronsPerLayer = nrOfNeuronsPerLayer;
neuronLayers = new ArrayList<ArrayList<Neuron>>(nrOfLayers);
inputNeuronLayer = new ArrayList<InputLayerPseudoNeuron>(nrOfInputs);
for (int i = 0; i < nrOfLayers; i++) {
neuronLayers.add(new ArrayList<Neuron>(nrOfNeuronsPerLayer.get(i)));
}
for (int i = 0; i < nrOfInputs; i++) {
inputNeuronLayer.add(new InputLayerPseudoNeuron());
}
}
public MultiLayeredNet() {
this(0,0,null);
}
public int getNrOfInputs() {
return nrOfInputs;
}
public int getNrOfLayers() {
return nrOfLayers;
}
@Override
public void run(String input) {
String[] inputValues = input.split(" ");
if (inputValues.length != nrOfInputs)
throw new InvalidInputNumberException();
for (int i = 0; i < nrOfInputs; i++) {
InputLayerPseudoNeuron in = this.inputNeuronLayer.get(i);
in.initialize();
in.adjustPotential(Float.parseFloat(inputValues[i]));
in.transfer();
}
for (int i = 0; i < nrOfLayers; i++) {
Interconnections interconnectionsLayer = interconnectionsLayers.get(i);
interconnectionsLayer.passSignal();
}
}
}
package cz.vsb.mro0010.neuralnetworks;
import java.util.ArrayList;
public abstract class NeuralNet {
protected ArrayList<Interconnections> interconnectionsLayers;
public NeuralNet(ArrayList<Interconnections> interconnectionsLayers) {
this.interconnectionsLayers = interconnectionsLayers;
}
public NeuralNet() {
this(new ArrayList<Interconnections>());
}
public abstract String getNeuronType();
public abstract int learn(String trainingSet);
public abstract void run(String input);
}
package cz.vsb.mro0010.neuralnetworks;
public abstract class Neuron {
private float potential; // inner potential
private float state; // excitation state
private float threshold; // threshold of excitation
public Neuron() {
this(0, 0, 0);
}
public Neuron(float potential, float state, float threshold) {
this.setPotential(potential);
this.setState(state);
this.setThreshold(threshold);
}
public void initialize() {
this.setPotential(0);
this.setState(0);
}
public float getThreshold() {
return threshold;
}
public void setThreshold(float threshold) {
this.threshold = threshold;
}
public float getState() {
return state;
}
protected void setState(float state) {
this.state = state;
}
protected float getPotential() {
return this.potential;
}
private void setPotential(float potential) {
this.potential = potential;
}
public void adjustPotential(float value) {
this.potential += value;
}
@Override
public String toString() {
return "Pot.: " + this.potential + ", State: " + this.state + ", Thr.: " + this.threshold;
}
public abstract void transfer();
}
package cz.vsb.mro0010.neuralnetworks;
public class SigmoidalNeuron extends Neuron {
private float error; //delta
private float slope; //lambda
public void setSlope(float slope) {
this.slope = slope;
}
public SigmoidalNeuron(float slope) {
this.slope = slope;
this.error = 0;
}
@Override
public void transfer() {
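// Logistic activation: y = 1 / (1 + exp(-lambda * z))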
float z = this.getPotential();
float y = (float) (1.0/(1.0 + Math.exp(-slope*z)));
this.setState(y);
}
public float getSlope() {
return slope;
}
public float getError() {
return error;
}
public void setError(float error) {
this.error = error;
}
public static void main(String args[]) {
SigmoidalNeuron neuron = new SigmoidalNeuron((float)0.5);
for (int i = -10; i <= 10; i++) {
neuron.initialize();
neuron.adjustPotential(i);
neuron.transfer();
System.out.println(neuron.getState());
}
}
}
package cz.vsb.mro0010.neuralnetworks;
import java.util.ArrayList;
import java.util.Arrays;
public class SinglePerceptronNeuralNet extends NeuralNet {
private Neuron neuron;
private int nrOfInputs;
private ArrayList<Connection> connections;
private ArrayList<InputLayerPseudoNeuron> input;
private String trainingOutput;
private float learnCoef;
public SinglePerceptronNeuralNet(Neuron neuron, int nrOfInputs, float learnCoef) {
super();
this.neuron = neuron;
this.nrOfInputs = nrOfInputs;
this.input = new ArrayList<InputLayerPseudoNeuron>();
this.connections = new ArrayList<Connection>();
for (int i = 0; i < this.nrOfInputs; i++) {
InputLayerPseudoNeuron inputNeuron = new InputLayerPseudoNeuron();
this.input.add(inputNeuron);
this.connections.add(new Connection(inputNeuron, neuron, (float)Math.random()));
}
this.setTrainingOutput(" ");
this.learnCoef = learnCoef;
}
@Override
public String getNeuronType() {
return neuron.getClass().getSimpleName();
}
@Override
public int learn(String trainingSet) {
ArrayList<String> trainingElements = new ArrayList<String>(Arrays.asList(trainingSet.split("\n")));
boolean learned = false;
int iterations = 0;
StringBuffer trainingProgress = new StringBuffer();
for (Connection c : connections) {
trainingProgress.append(String.valueOf(c.getWeight()));
trainingProgress.append(" ");
}
trainingProgress.append(String.valueOf(-neuron.getThreshold()));
trainingProgress.append("\n");
while (!learned) {
iterations++;
learned = true;
for (String element : trainingElements) {
String[] sa = element.split(" ");
String expectedOutput = sa[sa.length - 1];
StringBuffer sb = new StringBuffer();
for (int i = 0; i < sa.length - 1; i++) {
sb.append(sa[i]);
sb.append(" ");
}
this.run(sb.toString());
if (Float.parseFloat(expectedOutput) != Float.parseFloat(this.getOutput())) {
learned = false;
float eo = Float.parseFloat(expectedOutput);
float ro = Float.parseFloat(this.getOutput());
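// Perceptron learning rule: w_i += eta * (expected - actual) * x_i;
// the threshold moves with the opposite sign because w_0 = -threshold.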
neuron.setThreshold(neuron.getThreshold() + learnCoef*-(eo-ro)*1); // w_0 = -threshold
for (Connection c : connections) {
c.adjustWeight(learnCoef*(eo-ro)*c.getInputNeuron().getState());
}
for (Connection c : connections) {
trainingProgress.append(String.valueOf(c.getWeight()));
trainingProgress.append(" ");
}
trainingProgress.append(String.valueOf(neuron.getThreshold()));
trainingProgress.append("\n");
}
}
}
//System.out.println("Learned! in " + (iterations-1) + " iterations");
this.setTrainingOutput(trainingProgress.toString());
return iterations;
}
@Override
public void run(String inputString) {
String[] input = inputString.split(" ");
for (int i = 0; i < input.length; i++) {
InputLayerPseudoNeuron in = this.input.get(i);
in.initialize();
in.adjustPotential(Float.parseFloat(input[i]));
in.transfer();
}
neuron.initialize();
for (Connection c : connections) {
c.passSignal();
}
neuron.transfer();
}
public String getOutput() {
String output = String.valueOf(neuron.getState());
return output;
}
public String getTrainingOutput() {
return trainingOutput;
}
private void setTrainingOutput(String trainingOutput) {
this.trainingOutput = trainingOutput;
}
/*public static void main(String[] args) {
SinglePerceptronNeuralNet net = new SinglePerceptronNeuralNet(new BinaryNeuron(), 2, (float)0.7);
net.neuron.setThreshold((float) Math.random());
// String learnSet = "1 0.5 0\n0.4 0.8 1\n0.1 0.1 0\n0.6 0.9 1\n0.8 0.7 0\n0.4 1.0 1";
// net.learn(learnSet);
// net.run("0.7 0.9");
// System.out.println(net.getOutput());
// net.run("0.9 0.7");
// System.out.println(net.getOutput());
// net.run("0.2 0.2");
// System.out.println(net.getOutput());
// net.run("0.1 1.0");
// System.out.println(net.getOutput());
// net.run("1.0 0.1");
// System.out.println(net.getOutput());
String learnSet = "0.7 0.3 0\n0.2 0.6 1\n0.3 0.4 1\n0.9 0.8 0\n0.1 0.2 1\n0.5 0.6 1";
net.learn(learnSet);
net.run("0.7 0.9");
System.out.println(net.getOutput());
net.run("0.9 0.7");
System.out.println(net.getOutput());
net.run("0.2 0.2");
System.out.println(net.getOutput());
net.run("0.1 1.0");
System.out.println(net.getOutput());
net.run("1.0 0.1");
System.out.println(net.getOutput());
net.run("0.6 0.5");
System.out.println(net.getOutput());
net.run("0.5 0.6");
System.out.println(net.getOutput());
}*/
}
#!/bin/bash
. compilers.env
export FSFLAG='-cpp -I'
echo "#------------------#----------------------------------------------------------------------------------"
echo "# FUnit testing... #"
echo "#------------------#"
FUNIT_SCRIPTS=connection_m
cd src;
for F in ${FUNIT_SCRIPTS}; do
funit ${F};
done
echo "#---------------------#-------------------------------------------------------------------------------"
echo "# Memcheck testing... #"
echo "#---------------------#"
MEM_TEST_SCRIPTS=connection_m_mem_leak_test.out
cd ../build;
for F in ${MEM_TEST_SCRIPTS}; do
echo "Running ${F}..."
# valgrind prints a LEAK SUMMARY section only when leaks were found
TEST1_MEM_STATUS=`valgrind --leak-check=yes ./${F} 2>&1 | grep 'LEAK SUMMARY' | wc -l`
if [[ ${TEST1_MEM_STATUS} -gt 0 ]]; then
echo "ERROR: Memory leaks detected in ${F}!"
exit -1
fi
echo "${F} OK"
done
##############
# UNIT TESTS #
##############
for f in build/bin/unit-tests/*_test; do
${f} || exit -1
done
echo "#---------------------------#"
echo "# No memory leaks detected. #"
echo "#---------------------------#"