//
// Created by martin on 20.08.19.
//
#include <4neuro.h>
#include "../Network/ACSFNeuralNetwork.h"
//#include "../Neuron/Neuron.h"
/**
 * Trains the network with particle-swarm optimization and copies the
 * best-found parameter vector back into the network.
 *
 * @param net network whose weights/biases are optimized in place
 * @param ef  error function evaluated by the swarm (and reported afterwards)
 */
void optimize_via_particle_swarm(l4n::NeuralNetwork& net,
                                 l4n::ErrorFunction& ef) {
    /* TRAINING METHOD SETUP */
    // One [lower, upper] pair per optimized parameter (weights + biases).
    const size_t n_params = net.get_n_weights() + net.get_n_biases();
    std::vector<double> domain_bounds(2 * n_params);
    for (size_t j = 0; j < domain_bounds.size(); j += 2) {
        domain_bounds[j]     = -10;  // lower bound of the search interval
        domain_bounds[j + 1] =  10;  // upper bound of the search interval
    }

    const double cognitive_coef = 1.7;  // c1: pull toward a particle's own best
    const double social_coef    = 1.7;  // c2: pull toward the swarm's best
    const double inertia_weight = 0.7;  // w: velocity damping

    const size_t n_particles = 100;
    const size_t iter_max    = 30;

    /* if the maximal velocity from the previous step is less than 'gamma' times
     * the current maximal velocity, then one terminating criterion is met */
    const double gamma = 0.5;

    /* if 'delta' times 'n' particles are in the centroid neighborhood given by the
     * radius 'epsilon', then the second terminating criterion is met
     * ('n' is the total number of particles) */
    const double epsilon = 0.02;
    const double delta   = 0.7;

    l4n::ParticleSwarm swarm_01(&domain_bounds,
                                cognitive_coef,
                                social_coef,
                                inertia_weight,
                                gamma,
                                epsilon,
                                delta,
                                n_particles,
                                iter_max);

    swarm_01.optimize(ef);

    // Adopt the swarm's best parameters as the network's weights/biases.
    net.copy_parameter_space(swarm_01.get_parameters());

    /* ERROR CALCULATION */
    std::cout << "Run finished! Error of the network[Particle swarm]: " << ef.eval(nullptr) << std::endl;
    std::cout
        << "***********************************************************************************************************************"
        << std::endl;
}
/**
 * Refines the network parameters with Barzilai-Borwein gradient descent
 * (tolerance 1e-6, at most 1000 iterations) and copies the result back
 * into the network.
 *
 * @param net network whose weights/biases are optimized in place
 * @param ef  error function minimized by the optimizer
 */
void optimize_via_gradient_descent(l4n::NeuralNetwork& net,
                                   l4n::ErrorFunction& ef) {
    std::cout
        << "***********************************************************************************************************************"
        << std::endl;

    l4n::GradientDescentBB gd(1e-6, 1000);
    gd.optimize(ef);

    // Adopt the optimizer's final parameters as the network's weights/biases.
    net.copy_parameter_space(gd.get_parameters());

    /* ERROR CALCULATION */
    std::cout << "Run finished! Error of the network[Gradient descent]: " << ef.eval(nullptr) << std::endl;
}
int main() {
/* Specify cutoff functions */
l4n::CutoffFunction1 cutoff1(10.1);
l4n::CutoffFunction2 cutoff2(12.5);
l4n::CutoffFunction2 cutoff3(15.2);
l4n::CutoffFunction2 cutoff4(10.3);
l4n::CutoffFunction2 cutoff5(12.9);
/* Specify symmetry functions */
l4n::G1 sym_f1(&cutoff1);
l4n::G2 sym_f2(&cutoff2, 0.15, 0.75);
l4n::G2 sym_f3(&cutoff3, 0.1, 0.2);
l4n::G3 sym_f4(&cutoff4, 0.3);
l4n::G4 sym_f5(&cutoff5, 0.05, true, 0.05);
l4n::G4 sym_f6(&cutoff5, 0.05, false, 0.05);
std::vector<l4n::SymmetryFunction*> helium_sym_funcs = {&sym_f1, &sym_f2, &sym_f3, &sym_f4, &sym_f5, &sym_f6};
l4n::Element helium = l4n::Element("He",
helium_sym_funcs);
std::unordered_map<l4n::ELEMENT_SYMBOL, l4n::Element*> elements;
elements[l4n::ELEMENT_SYMBOL::He] = &helium;
/* Read data */
l4n::XYZReader reader("/home/kra568/lib4neuro/HE21+T0.xyz");
Martin Beseda
committed
reader.read();
std::cout << "Finished reading data" << std::endl;
Martin Beseda
committed
std::shared_ptr<l4n::DataSet> ds = reader.get_acsf_data_set(elements);
/* Create a neural network */
std::unordered_map<l4n::ELEMENT_SYMBOL, std::vector<unsigned int>> n_hidden_neurons;
n_hidden_neurons[l4n::ELEMENT_SYMBOL::He] = {10};
std::unordered_map<l4n::ELEMENT_SYMBOL, std::vector<l4n::NEURON_TYPE>> type_hidden_neurons;
type_hidden_neurons[l4n::ELEMENT_SYMBOL::He] = {l4n::NEURON_TYPE::LOGISTIC};
l4n::ACSFNeuralNetwork net(elements, *reader.get_element_list(), reader.contains_charge(), n_hidden_neurons, type_hidden_neurons);
std::cout << net.get_n_inputs() << " " << net.get_n_outputs() << std::endl;
l4n::MSE mse(&net, ds.get());
net.randomize_parameters();
optimize_via_particle_swarm(net, mse);
optimize_via_gradient_descent(net, mse);
std::vector<double> output;
output.resize(1);
for(auto e : *ds->get_data()) {
for(auto inp_e : e.first) {
std::cout << inp_e << " ";
}
std::cout << e.second.at(0) << " ";
net.eval_single(e.first, output);
std::cout << output.at(0) << std::endl;
}
Martin Beseda
committed
return 0;
}