Commit 68690ce9 authored by Martin Mrovec's avatar Martin Mrovec

Initial eclipse commit

parent 5a185c96
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<?fileVersion 4.0.0?><cproject storage_type_id="org.eclipse.cdt.core.XmlProjectDescriptionStorage">
<storageModule moduleId="org.eclipse.cdt.core.settings">
<cconfiguration id="photran.managedbuild.config.gnu.fortran.win32.exe.debug.338229552">
<storageModule buildSystemId="org.eclipse.cdt.managedbuilder.core.configurationDataProvider" id="photran.managedbuild.config.gnu.fortran.win32.exe.debug.338229552" moduleId="org.eclipse.cdt.core.settings" name="Debug">
<externalSettings/>
<extensions>
<extension id="org.eclipse.cdt.core.PE" point="org.eclipse.cdt.core.BinaryParser"/>
<extension id="org.eclipse.cdt.core.GASErrorParser" point="org.eclipse.cdt.core.ErrorParser"/>
<extension id="org.eclipse.cdt.core.GmakeErrorParser" point="org.eclipse.cdt.core.ErrorParser"/>
<extension id="org.eclipse.cdt.core.GLDErrorParser" point="org.eclipse.cdt.core.ErrorParser"/>
<extension id="org.eclipse.photran.core.GFortranErrorParser" point="org.eclipse.cdt.core.ErrorParser"/>
<extension id="org.eclipse.cdt.core.CWDLocator" point="org.eclipse.cdt.core.ErrorParser"/>
<extension id="org.eclipse.cdt.core.GCCErrorParser" point="org.eclipse.cdt.core.ErrorParser"/>
</extensions>
</storageModule>
<storageModule moduleId="cdtBuildSystem" version="4.0.0">
<configuration artifactName="${ProjName}" buildProperties="" cleanCommand="rm -rf" description="" errorParsers="org.eclipse.photran.core.GFortranErrorParser;org.eclipse.cdt.core.GCCErrorParser;org.eclipse.cdt.core.GLDErrorParser;org.eclipse.cdt.core.GASErrorParser;org.eclipse.cdt.core.GmakeErrorParser" id="photran.managedbuild.config.gnu.fortran.win32.exe.debug.338229552" name="Debug" parent="photran.managedbuild.config.gnu.fortran.win32.exe.debug">
<folderInfo id="photran.managedbuild.config.gnu.fortran.win32.exe.debug.338229552." name="/" resourcePath="">
<toolChain id="photran.managedbuild.toolchain.gnu.fortran.win32.exe.debug.609204123" name="GCC Fortran" superClass="photran.managedbuild.toolchain.gnu.fortran.win32.exe.debug">
<targetPlatform archList="all" binaryParser="org.eclipse.cdt.core.PE" id="photran.managedbuild.target.gnu.platform.win32.exe.debug.1367946370" name="Debug Platform" osList="win32" superClass="photran.managedbuild.target.gnu.platform.win32.exe.debug"/>
<builder buildPath="${workspace_loc:/4neuro}/Debug" id="photran.managedbuild.target.gnu.builder.win32.exe.debug.1825478107" managedBuildOn="true" name="Gnu Make Builder.Debug" superClass="photran.managedbuild.target.gnu.builder.win32.exe.debug"/>
<tool id="photran.managedbuild.tool.gnu.c.compiler.win32.exe.debug.778780997" name="GCC C Compiler" superClass="photran.managedbuild.tool.gnu.c.compiler.win32.exe.debug"/>
<tool id="photran.managedbuild.tool.gnu.fortran.compiler.win32.exe.debug.1743211871" name="GNU Fortran Compiler" superClass="photran.managedbuild.tool.gnu.fortran.compiler.win32.exe.debug"/>
<tool id="photran.managedbuild.tool.gnu.fortran.linker.win32.exe.debug.437267444" name="GNU Fortran Linker" superClass="photran.managedbuild.tool.gnu.fortran.linker.win32.exe.debug"/>
<tool id="photran.managedbuild.tool.gnu.assembler.win32.exe.debug.432119483" name="GCC Assembler" superClass="photran.managedbuild.tool.gnu.assembler.win32.exe.debug"/>
</toolChain>
</folderInfo>
</configuration>
</storageModule>
<storageModule moduleId="org.eclipse.cdt.core.externalSettings"/>
</cconfiguration>
<cconfiguration id="photran.managedbuild.config.gnu.fortran.win32.exe.release.1957908145">
<storageModule buildSystemId="org.eclipse.cdt.managedbuilder.core.configurationDataProvider" id="photran.managedbuild.config.gnu.fortran.win32.exe.release.1957908145" moduleId="org.eclipse.cdt.core.settings" name="Release">
<externalSettings/>
<extensions>
<extension id="org.eclipse.cdt.core.PE" point="org.eclipse.cdt.core.BinaryParser"/>
<extension id="org.eclipse.cdt.core.GASErrorParser" point="org.eclipse.cdt.core.ErrorParser"/>
<extension id="org.eclipse.cdt.core.GmakeErrorParser" point="org.eclipse.cdt.core.ErrorParser"/>
<extension id="org.eclipse.cdt.core.GLDErrorParser" point="org.eclipse.cdt.core.ErrorParser"/>
<extension id="org.eclipse.photran.core.GFortranErrorParser" point="org.eclipse.cdt.core.ErrorParser"/>
<extension id="org.eclipse.cdt.core.CWDLocator" point="org.eclipse.cdt.core.ErrorParser"/>
<extension id="org.eclipse.cdt.core.GCCErrorParser" point="org.eclipse.cdt.core.ErrorParser"/>
</extensions>
</storageModule>
<storageModule moduleId="cdtBuildSystem" version="4.0.0">
<configuration artifactName="${ProjName}" buildProperties="" cleanCommand="rm -rf" description="" errorParsers="org.eclipse.photran.core.GFortranErrorParser;org.eclipse.cdt.core.GCCErrorParser;org.eclipse.cdt.core.GLDErrorParser;org.eclipse.cdt.core.GASErrorParser;org.eclipse.cdt.core.GmakeErrorParser" id="photran.managedbuild.config.gnu.fortran.win32.exe.release.1957908145" name="Release" parent="photran.managedbuild.config.gnu.fortran.win32.exe.release">
<folderInfo id="photran.managedbuild.config.gnu.fortran.win32.exe.release.1957908145." name="/" resourcePath="">
<toolChain id="photran.managedbuild.toolchain.gnu.fortran.win32.exe.release.1111724220" name="GCC Fortran" superClass="photran.managedbuild.toolchain.gnu.fortran.win32.exe.release">
<targetPlatform archList="all" binaryParser="org.eclipse.cdt.core.PE" id="photran.managedbuild.target.gnu.platform.fortran.win32.exe.release.1225476161" name="Release Platform" osList="win32" superClass="photran.managedbuild.target.gnu.platform.fortran.win32.exe.release"/>
<builder buildPath="${workspace_loc:/4neuro}/Release" id="photran.managedbuild.target.gnu.builder.win32.exe.release.1043598058" managedBuildOn="true" name="Gnu Make Builder.Release" superClass="photran.managedbuild.target.gnu.builder.win32.exe.release"/>
<tool id="photran.managedbuild.tool.gnu.c.compiler.win32.exe.release.1436794482" name="GCC C Compiler" superClass="photran.managedbuild.tool.gnu.c.compiler.win32.exe.release"/>
<tool id="photran.managedbuild.tool.gnu.fortran.compiler.win32.exe.release.1119644158" name="GNU Fortran Compiler" superClass="photran.managedbuild.tool.gnu.fortran.compiler.win32.exe.release"/>
<tool id="photran.managedbuild.tool.gnu.fortran.linker.win32.exe.release.1754062441" name="GNU Fortran Linker" superClass="photran.managedbuild.tool.gnu.fortran.linker.win32.exe.release"/>
<tool id="photran.managedbuild.tool.gnu.assembler.win32.exe.release.1039785129" name="GCC Assembler" superClass="photran.managedbuild.tool.gnu.assembler.win32.exe.release"/>
</toolChain>
</folderInfo>
</configuration>
</storageModule>
</cconfiguration>
</storageModule>
<storageModule moduleId="scannerConfiguration">
<autodiscovery enabled="true" problemReportingEnabled="true" selectedProfileId=""/>
</storageModule>
<storageModule moduleId="cdtBuildSystem" version="4.0.0">
<project id="4neuro.photran.managedbuild.target.gnu.fortran.win32.exe.1809620819" name="Executable (Gnu Fortran on Windows)" projectType="photran.managedbuild.target.gnu.fortran.win32.exe"/>
</storageModule>
<storageModule moduleId="org.eclipse.cdt.core.LanguageSettingsProviders"/>
</cproject>
*.o
*.mod
*.out
before_script:
- apt-get update
- apt-get install -y -qq make gfortran cmake gem
- gem install funit
- ./build.sh
run_tests:
script: "./run_tests.sh"
<?xml version="1.0" encoding="UTF-8"?>
<projectDescription>
<name>4neuro</name>
<comment></comment>
<projects>
</projects>
<buildSpec>
<buildCommand>
<name>org.eclipse.cdt.managedbuilder.core.genmakebuilder</name>
<triggers>clean,full,incremental,</triggers>
<arguments>
</arguments>
</buildCommand>
<buildCommand>
<name>org.eclipse.cdt.managedbuilder.core.ScannerConfigBuilder</name>
<triggers>full,incremental,</triggers>
<arguments>
</arguments>
</buildCommand>
</buildSpec>
<natures>
<nature>org.eclipse.photran.core.fnature</nature>
<nature>org.eclipse.cdt.core.cnature</nature>
<nature>org.eclipse.cdt.managedbuilder.core.managedBuildNature</nature>
<nature>org.eclipse.cdt.managedbuilder.core.ScannerConfigNature</nature>
</natures>
</projectDescription>
cmake_minimum_required(VERSION 3.0)
project(4Neuro)
enable_language(Fortran)

#-------------------------------#
# Default installation location #
#-------------------------------#
# Linux: /usr/local
# Windows: c:/Program Files

#------------#
# Build type #
#------------#
# Default: Release
# Others: None, Debug
if (NOT CMAKE_BUILD_TYPE)
    set (CMAKE_BUILD_TYPE RELEASE CACHE STRING
        "Choose the type of build, options are: None Debug Release."
        FORCE)
endif (NOT CMAKE_BUILD_TYPE)

#--------------------------------#
# Setting Fortran compiler flags #
#--------------------------------#
# TODO: measure how fast the code runs with -funroll-loops,
#       with -funroll-all-loops, and without either switch.

# BUGFIX: Fortran_COMPILER_NAME was never defined, so the compiler
# detection below always fell through to the generic else-branch.
# Derive the bare executable name from the full compiler path.
get_filename_component(Fortran_COMPILER_NAME ${CMAKE_Fortran_COMPILER} NAME)

set(standard "-std=f2008")
if (Fortran_COMPILER_NAME MATCHES "gfortran.*")
    # gfortran
    set (CMAKE_Fortran_FLAGS_RELEASE "-funroll-loops -fno-f2c -O3 ${standard}")
    set (CMAKE_Fortran_FLAGS_DEBUG "-fno-f2c -O0 -g ${standard}")
elseif (Fortran_COMPILER_NAME MATCHES "ifort.*")
    # ifort (untested)
    set (CMAKE_Fortran_FLAGS_RELEASE "-f77rtl -O3 ${standard}")
    set (CMAKE_Fortran_FLAGS_DEBUG "-f77rtl -O0 -g ${standard}")
elseif (Fortran_COMPILER_NAME MATCHES "g77")
    # g77
    set (CMAKE_Fortran_FLAGS_RELEASE "-funroll-loops -fno-f2c -O3 -m32 ${standard}")
    set (CMAKE_Fortran_FLAGS_DEBUG "-fno-f2c -O0 -g -m32 ${standard}")
else (Fortran_COMPILER_NAME MATCHES "gfortran.*")
    message ("CMAKE_Fortran_COMPILER full path: " ${CMAKE_Fortran_COMPILER})
    message ("Fortran compiler: " ${Fortran_COMPILER_NAME})
    message ("No optimized Fortran compiler flags are known, we just try -O2...")
    set (CMAKE_Fortran_FLAGS_RELEASE "-O2 ${standard}")
    set (CMAKE_Fortran_FLAGS_DEBUG "-O0 -g ${standard}")
endif (Fortran_COMPILER_NAME MATCHES "gfortran.*")

#----------------#
# User variables #
#----------------#
set(SRC_DIR src)
set(BUILD_DIR build)
set(LIB_DIR lib)

#--------------------#
# Building libraries #
#--------------------#
link_directories("${LIB_DIR}")
include_directories("${BUILD_DIR}/${LIB_DIR}")
add_subdirectory("${SRC_DIR}" "${LIB_DIR}")

add_executable(connection_test.out ${SRC_DIR}/connection_test.f90)
target_link_libraries(connection_test.out connection_m neuron_dummy_m)
This source diff could not be displayed because it is too large. You can view the blob instead.
#!/bin/bash

# Out-of-tree build of 4neuro in ./build, followed by Doxygen + LaTeX docs.
# Each step aborts the script with a non-zero exit code on failure.
# NOTE: POSIX exit codes are 0-255, so 'exit 1' is used instead of the
# non-portable 'exit -1' the script previously relied on.

echo "Creating folder 'build'...";
mkdir -p build/lib || exit 1;
echo "Folder 'build' was created";
cd build || exit 1;
cmake .. || exit 1;
make && echo "Installation complete." || exit 1;
#make install;
echo "Generating documentation into folder 'docs'...";
doxygen >/dev/null && echo "Documentation was generated." || exit 1;
cd docs/latex || exit 1;
echo "Building LaTeX documentation...";
make >/dev/null && echo "Documentation was built." || exit 1;
cd ../..;
#!/bin/bash
# Remove everything build.sh produces: the out-of-tree build directory
# and all generated documentation (docs/ itself is kept).
rm -rf build
rm -rf docs/*
This diff is collapsed.
package cz.vsb.mro0010.neuralnetworks;
/**
 * Neuron with a binary (step) transfer function: the excitation state
 * becomes 1 when the inner potential exceeds the threshold, otherwise 0.
 */
public class BinaryNeuron extends Neuron {

    /** Sets state to 1 if potential is strictly above the threshold, else 0. */
    @Override
    public void transfer() {
        this.setState(this.getPotential() > this.getThreshold() ? 1 : 0);
    }
}
This diff is collapsed.
package cz.vsb.mro0010.neuralnetworks;
/**
 * Weighted, directed link between two neurons. Passing a signal multiplies
 * the input neuron's state by the weight and adds the product to the output
 * neuron's inner potential.
 */
public class Connection {

    private Neuron inputNeuron;   // source of the signal
    private Neuron outputNeuron;  // destination of the signal
    private float weight;         // synaptic weight

    /**
     * Creates a connection between two neurons.
     *
     * @param inputNeuron  neuron whose state feeds this connection
     * @param outputNeuron neuron whose potential receives the weighted signal
     * @param weight       initial synaptic weight
     */
    public Connection(Neuron inputNeuron, Neuron outputNeuron, float weight) {
        this.inputNeuron = inputNeuron;
        this.outputNeuron = outputNeuron;
        this.weight = weight;
    }

    protected Neuron getInputNeuron() {
        return this.inputNeuron;
    }

    protected void setInputNeuron(Neuron inputNeuron) {
        this.inputNeuron = inputNeuron;
    }

    protected Neuron getOutputNeuron() {
        return this.outputNeuron;
    }

    protected void setOutputNeuron(Neuron outputNeuron) {
        this.outputNeuron = outputNeuron;
    }

    public float getWeight() {
        return this.weight;
    }

    public void setWeight(float weight) {
        this.weight = weight;
    }

    /** Adds {@code value} to the current weight (used by learning rules). */
    public void adjustWeight(float value) {
        this.weight += value;
    }

    /** Propagates the weighted input state into the output neuron's potential. */
    public void passSignal() {
        outputNeuron.adjustPotential(inputNeuron.getState() * this.getWeight());
    }

    @Override
    public String toString() {
        return "Weight: " + this.getWeight();
    }
}
package cz.vsb.mro0010.neuralnetworks;
/**
 * Pass-through neuron that forms the input layer: its transfer function
 * copies the accumulated potential straight to the state, so raw input
 * values enter the network unchanged.
 */
public class InputLayerPseudoNeuron extends Neuron {

    // The implicit no-arg constructor is equivalent to the explicit
    // super()-delegating one the class used to declare.

    /** Copies the inner potential directly to the excitation state. */
    @Override
    public void transfer() {
        this.setState(this.getPotential());
    }
}
package cz.vsb.mro0010.neuralnetworks;
import java.util.ArrayList;
/**
 * Base container for a layer of weighted connections between neurons.
 * Subclasses supply the learning rule via {@link #adjustWeights()}.
 *
 * <p>The previously commented-out slower variant of {@code passSignal()}
 * has been removed as dead code.
 */
public abstract class Interconnections {

    /** All connections of this layer, in insertion order. */
    protected ArrayList<Connection> connections;

    public Interconnections() {
        this.connections = new ArrayList<Connection>();
    }

    public ArrayList<Connection> getConnections() {
        return connections;
    }

    public void addConnection(Connection connection) {
        this.connections.add(connection);
    }

    /**
     * Propagates signals through every connection. For each distinct output
     * neuron, in order of first appearance, the neuron is re-initialized,
     * all of its incoming connections pass their signal, and its transfer
     * function fires before the next output neuron is processed. That
     * sequencing matters: a neuron transferred earlier feeds its fresh state
     * into later neurons of the same pass.
     *
     * <p>NOTE(review): quadratic in the number of connections; fine for the
     * small layers this library targets.
     */
    public void passSignal() {
        ArrayList<Neuron> processedNeurons = new ArrayList<Neuron>();
        for (Connection c : this.connections) {
            Neuron n = c.getOutputNeuron();
            if (!processedNeurons.contains(n)) {
                processedNeurons.add(n);
                n.initialize();
                // Accumulate every incoming signal before the transfer fires.
                for (Connection cn : this.connections) {
                    if (cn.getOutputNeuron().equals(n)) {
                        cn.passSignal();
                    }
                }
                n.transfer();
            }
        }
    }

    /** Applies the subclass-specific learning rule to the connection weights. */
    public abstract void adjustWeights();
}
package cz.vsb.mro0010.neuralnetworks;
/**
 * Connection layer trained with error back-propagation.
 */
public class InterconnectionsBP extends InterconnectionsMultiLayer {

    public InterconnectionsBP(float learningRate) {
        super(learningRate);
    }

    public void setLearningRate(float learningRate) {
        this.learningRate = learningRate;
    }

    /**
     * Back-propagation weight update. Must run only after the deltas of
     * all output neurons have been computed, because every connection
     * reads its output neuron's stored error.
     */
    @Override
    public void adjustWeights() {
        for (Connection connection : this.connections) {
            SigmoidalNeuron out = (SigmoidalNeuron) connection.getOutputNeuron();
            float delta = out.getError();
            float lambda = out.getSlope();
            float y = out.getState();
            float x = connection.getInputNeuron().getState();
            // dE/dw = delta * sigmoid'(z) * x, where sigmoid' = lambda*y*(1-y)
            float errorDerivative = delta * lambda * y * (1 - y) * x;
            connection.adjustWeight(-learningRate * errorDerivative);
        }
    }
}
package cz.vsb.mro0010.neuralnetworks;
/**
 * Connection layer of a multi-layered network; carries the learning
 * rate shared by gradient-based learning rules.
 */
public abstract class InterconnectionsMultiLayer extends Interconnections {

    /** Learning rate (eta) used when adjusting weights. */
    protected float learningRate;

    public InterconnectionsMultiLayer(float learningRate) {
        this.learningRate = learningRate;
    }
}
package cz.vsb.mro0010.neuralnetworks;
/**
 * Thrown when the number of supplied input values differs from the
 * network's declared number of inputs.
 */
public class InvalidInputNumberException extends RuntimeException {

    private static final long serialVersionUID = -6282750644609100469L;

    public InvalidInputNumberException() {
        super("Number of input values does not correspond with network input size");
    }
}
package cz.vsb.mro0010.neuralnetworks;
/**
 * Thrown when a layer index does not match the network's layer structure.
 */
public class InvalidLayerNumberException extends RuntimeException {

    private static final long serialVersionUID = 1366940285989358521L;

    public InvalidLayerNumberException() {
        super("Number of layer does not correspond with network");
    }
}
package cz.vsb.mro0010.neuralnetworks;
/**
 * Thrown when a neuron of an unexpected concrete type is encountered.
 */
public class InvalidNeuronTypeException extends RuntimeException {

    private static final long serialVersionUID = 5354372081840990196L;

    public InvalidNeuronTypeException() {
        super("Wrong Neuron type");
    }
}
package cz.vsb.mro0010.neuralnetworks;
import java.util.ArrayList;
/**
 * Base class for layered feed-forward networks: a pseudo input layer of
 * {@code nrOfInputs} pass-through neurons plus {@code nrOfLayers} layers of
 * neurons wired together by interconnection layers.
 */
public abstract class MultiLayeredNet extends NeuralNet {

    protected ArrayList<ArrayList<Neuron>> neuronLayers;
    protected ArrayList<InputLayerPseudoNeuron> inputNeuronLayer;
    protected int nrOfInputs;
    protected int nrOfLayers;
    protected ArrayList<Integer> nrOfNeuronsPerLayer;

    /**
     * @param nrOfInputs          number of network inputs
     * @param nrOfLayers          number of neuron layers
     * @param nrOfNeuronsPerLayer per-layer neuron counts (size nrOfLayers)
     */
    public MultiLayeredNet(int nrOfInputs, int nrOfLayers, ArrayList<Integer> nrOfNeuronsPerLayer) {
        super();
        this.nrOfInputs = nrOfInputs;
        this.nrOfLayers = nrOfLayers;
        this.nrOfNeuronsPerLayer = nrOfNeuronsPerLayer;
        this.neuronLayers = new ArrayList<ArrayList<Neuron>>(nrOfLayers);
        this.inputNeuronLayer = new ArrayList<InputLayerPseudoNeuron>(nrOfInputs);
        for (int layer = 0; layer < nrOfLayers; layer++) {
            this.neuronLayers.add(new ArrayList<Neuron>(nrOfNeuronsPerLayer.get(layer)));
        }
        for (int i = 0; i < nrOfInputs; i++) {
            this.inputNeuronLayer.add(new InputLayerPseudoNeuron());
        }
    }

    /** Creates an empty network (no inputs, no layers). */
    public MultiLayeredNet() {
        this(0, 0, null);
    }

    public int getNrOfInputs() {
        return nrOfInputs;
    }

    public int getNrOfLayers() {
        return nrOfLayers;
    }

    /**
     * Feeds a space-separated list of input values through the network.
     *
     * @param input one value per network input, separated by single spaces
     * @throws InvalidInputNumberException if the token count differs from
     *         the number of inputs
     * @throws NumberFormatException if a token is not a parseable float
     */
    @Override
    public void run(String input) {
        String[] inputValues = input.split(" ");
        if (inputValues.length != nrOfInputs) {
            throw new InvalidInputNumberException();
        }
        // Load each raw value into its input pseudo-neuron.
        for (int i = 0; i < nrOfInputs; i++) {
            InputLayerPseudoNeuron in = this.inputNeuronLayer.get(i);
            in.initialize();
            in.adjustPotential(Float.parseFloat(inputValues[i]));
            in.transfer();
        }
        // Propagate the signal layer by layer.
        for (int i = 0; i < nrOfLayers; i++) {
            this.interconnectionsLayers.get(i).passSignal();
        }
    }
}
package cz.vsb.mro0010.neuralnetworks;
import java.util.ArrayList;
/**
 * Root abstraction of a neural network: owns the interconnection layers and
 * defines the learn/run contract implemented by concrete networks.
 */
public abstract class NeuralNet {

    /** Interconnection layers in forward order. */
    protected ArrayList<Interconnections> interconnectionsLayers;

    public NeuralNet(ArrayList<Interconnections> interconnectionsLayers) {
        this.interconnectionsLayers = interconnectionsLayers;
    }

    /** Creates a network with an empty list of interconnection layers. */
    public NeuralNet() {
        this(new ArrayList<Interconnections>());
    }

    /** @return the concrete neuron type used by this network */
    public abstract String getNeuronType();

    /**
     * Trains the network on the given training set.
     *
     * @param trainingSet training data (newline-separated elements, by the
     *        convention of the concrete implementations)
     * @return an implementation-defined result (e.g. an iteration count —
     *         confirm against the concrete subclass)
     */
    public abstract int learn(String trainingSet);

    /** Runs the network on a single input vector. */
    public abstract void run(String input);
}
package cz.vsb.mro0010.neuralnetworks;
/**
 * Base model neuron: accumulates an inner potential, exposes an excitation
 * state, and fires through a subclass-defined transfer function.
 */
public abstract class Neuron {

    private float potential;  // inner potential (accumulated input)
    private float state;      // excitation state (output)
    private float threshold;  // threshold of excitation

    /** Creates a neuron with potential, state and threshold all zero. */
    public Neuron() {
        this(0, 0, 0);
    }

    public Neuron(float potential, float state, float threshold) {
        this.setPotential(potential);
        this.setState(state);
        this.setThreshold(threshold);
    }

    /** Resets potential and state to zero; the threshold is preserved. */
    public void initialize() {
        setPotential(0);
        setState(0);
    }

    public float getThreshold() {
        return threshold;
    }

    public void setThreshold(float threshold) {
        this.threshold = threshold;
    }

    public float getState() {
        return state;
    }

    protected void setState(float state) {
        this.state = state;
    }

    protected float getPotential() {
        return potential;
    }

    private void setPotential(float potential) {
        this.potential = potential;
    }

    /** Adds {@code value} to the inner potential (signal accumulation). */
    public void adjustPotential(float value) {
        this.potential += value;
    }

    @Override
    public String toString() {
        return "Pot.: " + this.potential + ", State: " + this.state + ", Thr.: " + this.threshold;
    }

    /** Maps the inner potential to a new excitation state. */
    public abstract void transfer();
}
This diff is collapsed.
This diff is collapsed.
package cz.vsb.mro0010.neuralnetworks;
/**
 * Neuron with a logistic (sigmoid) transfer function
 * y = 1 / (1 + exp(-slope * potential)). Also stores the back-propagated
 * error (delta) consumed by gradient-based learning rules.
 */
public class SigmoidalNeuron extends Neuron {

    private float error;  // delta set during back-propagation
    private float slope;  // lambda, steepness of the sigmoid

    public SigmoidalNeuron(float slope) {
        this.slope = slope;
        this.error = 0;
    }

    public void setSlope(float slope) {
        this.slope = slope;
    }

    public float getSlope() {
        return slope;
    }

    public float getError() {
        return error;
    }

    public void setError(float error) {
        this.error = error;
    }

    /** Applies the logistic function to the inner potential. */
    @Override
    public void transfer() {
        // Keep the float product before widening to double so results match
        // the original bit-for-bit.
        float z = this.getPotential();
        float y = (float) (1.0/(1.0 + Math.exp(-slope*z)));
        this.setState(y);
    }

    /** Demo: prints sigmoid(0.5 * i) for every integer i in [-10, 10]. */
    public static void main(String[] args) {
        SigmoidalNeuron neuron = new SigmoidalNeuron((float) 0.5);
        for (int i = -10; i <= 10; i++) {
            neuron.initialize();
            neuron.adjustPotential(i);
            neuron.transfer();
            System.out.println(neuron.getState());
        }
    }
}
package cz.vsb.mro0010.neuralnetworks;
import java.util.ArrayList;
import java.util.Arrays;
public class SinglePerceptronNeuralNet extends NeuralNet {
private Neuron neuron;
private int nrOfInputs;
private ArrayList<Connection> connections;
private ArrayList<InputLayerPseudoNeuron> input;
private String trainingOutput;
private float learnCoef;
public SinglePerceptronNeuralNet(Neuron neuron, int nrOfInputs, float learnCoef) {
super();
this.neuron = neuron;
this.nrOfInputs = nrOfInputs;
this.input = new ArrayList<InputLayerPseudoNeuron>();
this.connections = new ArrayList<Connection>();
for (int i = 0; i < this.nrOfInputs; i++) {
InputLayerPseudoNeuron inputNeuron = new InputLayerPseudoNeuron();
this.input.add(inputNeuron);
this.connections.add(new Connection(inputNeuron, neuron, (float)Math.random()));
}
this.setTrainingOutput(" ");
this.learnCoef = learnCoef;
}
@Override
public String getNeuronType() {
return neuron.getClass().getSimpleName();
}
@Override
public int learn(String trainingSet) {
ArrayList<String> trainingElements = new ArrayList<String>(Arrays.asList(trainingSet.split("\n")));
boolean learned = false;
int iterations = 0;
StringBuffer trainingProgress = new StringBuffer();
for (Connection c : connections) {
trainingProgress.append(String.valueOf(c.getWeight()));
trainingProgress.append(" ");
}
trainingProgress.append(String.valueOf(-neuron.getThreshold()));
trainingProgress.append("\n");
while (!learned) {
iterations++;
learned = true;
for (String element : trainingElements) {
String[] sa = element.split(" ");
String expectedOutput = sa[sa.length - 1];
StringBuffer sb = new StringBuffer();
for (int i = 0; i < sa.length - 1; i++) {
sb.append(sa[i]);
sb.append(" ");
}
this.run(sb.toString());
if (Float.parseFloat(expectedOutput) != Float.parseFloat(this.getOutput())) {
learned = false;
float eo = Float.parseFloat(expectedOutput);
float ro = Float.parseFloat(this.getOutput());
neuron.setThreshold(neuron.getThreshold() + learnCoef*-(eo-ro)*1); // w_0 = -threshold
for (Connection c : connections) {